
Commit 7842317

solve #123
1 parent b4dcdcd commit 7842317

File tree

2 files changed (+166, -0 lines)


src/lib.rs

Lines changed: 1 addition & 0 deletions

@@ -124,3 +124,4 @@ mod n0122_best_time_to_buy_and_sell_stock_ii;
 mod n0123_best_time_to_buy_and_sell_stock_iii;
 mod n0124_binary_tree_maximum_path_sum;
 mod n0125_valid_palindrome;
+mod n0126_word_ladder_ii;
src/n0126_word_ladder_ii.rs

Lines changed: 165 additions & 0 deletions
@@ -0,0 +1,165 @@

/**
 * [126] Word Ladder II
 *
 * Given two words (beginWord and endWord), and a dictionary's word list, find all shortest transformation sequence(s) from beginWord to endWord, such that:
 *
 * 1. Only one letter can be changed at a time.
 * 2. Each transformed word must exist in the word list. Note that beginWord is not a transformed word.
 *
 * Note:
 *
 * - Return an empty list if there is no such transformation sequence.
 * - All words have the same length.
 * - All words contain only lowercase alphabetic characters.
 * - You may assume no duplicates in the word list.
 * - You may assume beginWord and endWord are non-empty and are not the same.
 *
 * Example 1:
 *
 * Input:
 * beginWord = "hit",
 * endWord = "cog",
 * wordList = ["hot","dot","dog","lot","log","cog"]
 *
 * Output:
 * [
 *   ["hit","hot","dot","dog","cog"],
 *   ["hit","hot","lot","log","cog"]
 * ]
 *
 * Example 2:
 *
 * Input:
 * beginWord = "hit"
 * endWord = "cog"
 * wordList = ["hot","dot","dog","lot","log"]
 *
 * Output: []
 *
 * Explanation: The endWord "cog" is not in wordList, therefore there is no possible transformation.
 */
pub struct Solution {}

// submission codes start here

/*
 If word A can be transformed into word B by changing a single character, treat A and B
 as connected; the problem then becomes a BFS shortest-path search on an unweighted graph.

 When implementing, we can either construct the graph up front, which costs O(L*N^2)
 (L is the word length), or go back to the word list to look for connected nodes at
 every step; the time complexity is the same.

 Since every shortest path is required, we record all possible predecessors of each
 node, and finally enumerate the paths with a DFS or BFS.

 I can't think of a better approach for now.
*/
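
/*
 A hand-worked trace of Example 1 above (an illustration, not part of the submitted
 code), to make the backward BFS concrete. Starting from "cog":

   dist(cog) = 0
   dist(dog) = dist(log) = 1    predecessors: [cog]
   dist(dot) = dist(lot) = 2    predecessors: [dog] and [log]
   dist(hot) = 3                predecessors: [dot, lot]

 "hot" is one change away from beginWord "hit", so the DFS over the recorded
 predecessors produces ["hit","hot","dot","dog","cog"] and ["hit","hot","lot","log","cog"].
*/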
use std::collections::VecDeque;
use std::collections::HashSet;

impl Solution {
    pub fn find_ladders(begin_word: String, end_word: String, word_list: Vec<String>) -> Vec<Vec<String>> {
        let mut res = Vec::new();
        let len = word_list.len();
        // The BFS runs backwards from end_word; if it is not in the list, there is no ladder.
        let target = match word_list.iter().position(|s| s == &end_word) {
            Some(t) => t,
            None => return res,
        };
        let mut deq = VecDeque::new();
        deq.push_back(target);
        // paths[i] records (distance from end_word, indices of the neighbors one step
        // closer to end_word); an empty predecessor list marks end_word itself.
        let mut paths: Vec<(i32, Vec<usize>)> = vec![(i32::max_value(), vec![]); len];
        paths[target].0 = 0;
        let mut found_shortest = false;
        let mut shortest = i32::max_value();
        let mut in_queue = HashSet::new();
        while let Some(i) = deq.pop_front() {
            if Solution::connect(&begin_word, &word_list[i]) {
                // This word touches begin_word: complete the path(s) using DFS.
                if paths[i].0 > shortest { continue }
                Solution::dfs(i, vec![begin_word.clone()], &word_list, &paths, &mut res);
                shortest = paths[i].0;
                found_shortest = true;
            }
            // Once the shortest length is known, just drain the remaining nodes in deq;
            // they can only complete paths of the same length, never extend the search.
            if found_shortest { continue }
            for j in 0..len {
                if j == i { continue }
                if Solution::connect(&word_list[i], &word_list[j]) && paths[i].0 + 1 <= paths[j].0 {
                    paths[j].1.push(i);
                    paths[j].0 = paths[i].0 + 1;
                    // HashSet::insert returns true only on first insertion,
                    // so each word enters the queue at most once.
                    if in_queue.insert(j) {
                        deq.push_back(j);
                    }
                }
            }
        }
        res
    }

    // Walk the recorded predecessor lists from `curr` down to end_word,
    // cloning the partial path at every branch.
    fn dfs(curr: usize, mut path: Vec<String>, words: &[String], paths: &[(i32, Vec<usize>)], res: &mut Vec<Vec<String>>) {
        path.push(words[curr].clone());
        if paths[curr].1.is_empty() {
            // Only end_word has no predecessor: the path is complete.
            res.push(path);
            return
        }
        for &prev in paths[curr].1.iter() {
            Solution::dfs(prev, path.clone(), words, paths, res);
        }
    }

    // Whether s1 can be transformed into s2 by changing at most one character.
    #[inline(always)]
    fn connect(s1: &str, s2: &str) -> bool {
        if s1.len() != s2.len() { return false }
        let mut iter1 = s1.chars();
        let mut iter2 = s2.chars();
        let mut diff = 0;
        while let (Some(c1), Some(c2)) = (iter1.next(), iter2.next()) {
            if c1 != c2 {
                diff += 1;
                if diff >= 2 { return false }
            }
        }
        true
    }
}

// submission codes end
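
// The notes above mention the O(L*N^2) pairwise scan and that a better approach
// wasn't obvious. One known alternative (a sketch, not used by the submission;
// `adjacency` is a hypothetical helper added here for illustration): bucket words
// by wildcard patterns, e.g. "h*t" collects "hit" and "hot", so all neighbor pairs
// fall out in roughly O(N*L^2).
use std::collections::HashMap;

#[allow(dead_code)]
fn adjacency(word_list: &[String]) -> Vec<Vec<usize>> {
    // Map each wildcard pattern to the indices of the words that match it.
    // Words are lowercase ASCII per the problem statement, so byte ranges are safe.
    let mut buckets: HashMap<String, Vec<usize>> = HashMap::new();
    for (i, w) in word_list.iter().enumerate() {
        for pos in 0..w.len() {
            let mut pat = w.clone();
            pat.replace_range(pos..pos + 1, "*");
            buckets.entry(pat).or_insert_with(Vec::new).push(i);
        }
    }
    // Two distinct words sharing a bucket differ in exactly one position.
    let mut adj = vec![Vec::new(); word_list.len()];
    for ids in buckets.values() {
        for &a in ids {
            for &b in ids {
                if a != b {
                    adj[a].push(b);
                }
            }
        }
    }
    adj
}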

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_126() {
        assert_eq!(
            Solution::find_ladders("hit".to_owned(), "cog".to_owned(),
                vec_string!["hot","dot","dog","lot","log","cog"]),
            vec![
                vec_string!["hit","hot","dot","dog","cog"],
                vec_string!["hit","hot","lot","log","cog"],
            ]
        );
        assert_eq!(
            Solution::find_ladders("cet".to_owned(), "ism".to_owned(),
                vec_string!["kid","tag","pup","ail","tun","woo","erg","luz","brr","gay","sip","kay","per","val","mes","ohs","now","boa","cet","pal","bar","die","war","hay","eco","pub","lob","rue","fry","lit","rex","jan","cot","bid","ali","pay","col","gum","ger","row","won","dan","rum","fad","tut","sag","yip","sui","ark","has","zip","fez","own","ump","dis","ads","max","jaw","out","btu","ana","gap","cry","led","abe","box","ore","pig","fie","toy","fat","cal","lie","noh","sew","ono","tam","flu","mgm","ply","awe","pry","tit","tie","yet","too","tax","jim","san","pan","map","ski","ova","wed","non","wac","nut","why","bye","lye","oct","old","fin","feb","chi","sap","owl","log","tod","dot","bow","fob","for","joe","ivy","fan","age","fax","hip","jib","mel","hus","sob","ifs","tab","ara","dab","jag","jar","arm","lot","tom","sax","tex","yum","pei","wen","wry","ire","irk","far","mew","wit","doe","gas","rte","ian","pot","ask","wag","hag","amy","nag","ron","soy","gin","don","tug","fay","vic","boo","nam","ave","buy","sop","but","orb","fen","paw","his","sub","bob","yea","oft","inn","rod","yam","pew","web","hod","hun","gyp","wei","wis","rob","gad","pie","mon","dog","bib","rub","ere","dig","era","cat","fox","bee","mod","day","apr","vie","nev","jam","pam","new","aye","ani","and","ibm","yap","can","pyx","tar","kin","fog","hum","pip","cup","dye","lyx","jog","nun","par","wan","fey","bus","oak","bad","ats","set","qom","vat","eat","pus","rev","axe","ion","six","ila","lao","mom","mas","pro","few","opt","poe","art","ash","oar","cap","lop","may","shy","rid","bat","sum","rim","fee","bmw","sky","maj","hue","thy","ava","rap","den","fla","auk","cox","ibo","hey","saw","vim","sec","ltd","you","its","tat","dew","eva","tog","ram","let","see","zit","maw","nix","ate","gig","rep","owe","ind","hog","eve","sam","zoo","any","dow","cod","bed","vet","ham","sis","hex","via","fir","nod","mao","aug","mum","hoe","bah","hal","keg","hew","zed","tow","gog","ass","dem","who","bet","gos","son","ear","spy","kit","boy","due","sen","oaf","mix","hep","fur","ada","bin","nil","mia","ewe","hit","fix","sad","rib","eye","hop","haw","wax","mid","tad","ken","wad","rye","pap","bog","gut","ito","woe","our","ado","sin","mad","ray","hon","roy","dip","hen","iva","lug","asp","hui","yak","bay","poi","yep","bun","try","lad","elm","nat","wyo","gym","dug","toe","dee","wig","sly","rip","geo","cog","pas","zen","odd","nan","lay","pod","fit","hem","joy","bum","rio","yon","dec","leg","put","sue","dim","pet","yaw","nub","bit","bur","sid","sun","oil","red","doc","moe","caw","eel","dix","cub","end","gem","off","yew","hug","pop","tub","sgt","lid","pun","ton","sol","din","yup","jab","pea","bug","gag","mil","jig","hub","low","did","tin","get","gte","sox","lei","mig","fig","lon","use","ban","flo","nov","jut","bag","mir","sty","lap","two","ins","con","ant","net","tux","ode","stu","mug","cad","nap","gun","fop","tot","sow","sal","sic","ted","wot","del","imp","cob","way","ann","tan","mci","job","wet","ism","err","him","all","pad","hah","hie","aim","ike","jed","ego","mac","baa","min","com","ill","was","cab","ago","ina","big","ilk","gal","tap","duh","ola","ran","lab","top","gob","hot","ora","tia","kip","han","met","hut","she","sac","fed","goo","tee","ell","not","act","gil","rut","ala","ape","rig","cid","god","duo","lin","aid","gel","awl","lag","elf","liz","ref","aha","fib","oho","tho","her","nor","ace","adz","fun","ned","coo","win","tao","coy","van","man","pit","guy","foe","hid","mai","sup","jay","hob","mow","jot","are","pol","arc","lax","aft","alb","len","air","pug","pox","vow","got","meg","zoe","amp","ale","bud","gee","pin","dun","pat","ten","mob"]),
            vec![
                vec_string!["cet","get","gee","gte","ate","ats","its","ito","ibo","ibm","ism"],
                vec_string!["cet","cat","can","ian","inn","ins","its","ito","ibo","ibm","ism"],
                vec_string!["cet","cot","con","ion","inn","ins","its","ito","ibo","ibm","ism"],
            ]
        );
    }
}
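
// The tests use the crate's `vec_string!` macro, which is defined elsewhere in this
// repository (this file does not define it). Assuming it simply builds a Vec<String>
// from string literals, a minimal stand-in would be:
//
//     macro_rules! vec_string {
//         ($($x:expr),* $(,)?) => { vec![$($x.to_owned()),*] };
//     }
//
// and it would need to be in scope before this test module.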

0 commit comments
