Summary of Common Algorithms
binary search
def binary_search(arr, item):
    # Search a sorted list for item; return its index, or None if it is absent.
    low = 0
    high = len(arr) - 1
    while low <= high:
        # Check the middle element and discard the half that cannot contain item.
        mid = (low + high) // 2
        guess = arr[mid]
        if item == guess:
            return mid
        if item < guess:
            high = mid - 1
        else:
            low = mid + 1
    return None
selection sort
def find_smallest(arr):
    # Return the index of the smallest element in arr.
    smallest = arr[0]
    smallest_index = 0
    for i in range(1, len(arr)):
        if arr[i] < smallest:
            smallest = arr[i]
            smallest_index = i
    return smallest_index

def selection_sort(arr):
    # Repeatedly move the smallest remaining element into a new list.
    sorted_arr = []
    while arr:
        smallest_index = find_smallest(arr)
        sorted_arr.append(arr.pop(smallest_index))
    return sorted_arr
quick sort
def quick_sort(arr):
    # Base case: a list with fewer than two elements is already sorted.
    if len(arr) < 2:
        return arr
    # Partition around the first element, then sort the two halves recursively.
    pivot = arr[0]
    less = [item for item in arr[1:] if item <= pivot]
    more = [item for item in arr[1:] if item > pivot]
    return quick_sort(less) + [pivot] + quick_sort(more)
breadth-first search
from collections import deque

graph = {}
graph['you'] = ['alice', 'bob', 'claire']
graph['bob'] = ['anuj', 'peggy']
graph['alice'] = ['peggy']
graph['claire'] = ['thom', 'jonny']
graph['anuj'] = []
graph['peggy'] = []
graph['thom'] = []
graph['jonny'] = []

def check(name):
    # A person counts as a mango seller if their name ends with 'm'.
    return name[-1] == 'm'

def breadth_first_search(name):
    # Explore the graph outward from name, one level at a time.
    search_queue = deque()
    search_queue += graph[name]
    searched = []  # people already checked, so nobody is checked twice
    while search_queue:
        person = search_queue.popleft()
        if person not in searched:
            if check(person):
                print(person + ' is a mango seller!')
                return True
            search_queue += graph[person]
            searched.append(person)
    return False
Dijkstra’s algorithm
Four steps to Dijkstra's algorithm:
- Find the “cheapest” node. This is the node you can get to in the least amount of time.
- Update the costs of the neighbors of this node.
- Repeat until you’ve done this for every node in the graph.
- Calculate the final path (a path-reconstruction sketch follows the code below).
graph = {}
graph['start'] = {'a': 6, 'b': 2}
graph['a'] = {'finish': 1}
graph['b'] = {'a': 3, 'finish': 5}
graph['finish'] = {}

# Cheapest known cost to reach each node from the start; finish is unknown at first.
costs = {'a': 6, 'b': 2, 'finish': float('inf')}
processed = []

def find_lowest_cost_node(costs):
    # Among the nodes not yet processed, pick the one that is cheapest to reach.
    lowest_cost = float('inf')
    lowest_cost_node = None
    for node in costs:
        if costs[node] < lowest_cost and node not in processed:
            lowest_cost = costs[node]
            lowest_cost_node = node
    return lowest_cost_node

node = find_lowest_cost_node(costs)
while node:
    neighbors = graph[node]
    for item in neighbors:
        # If reaching this neighbor through the current node is cheaper, update it.
        new_cost = costs[node] + neighbors[item]
        if new_cost < costs[item]:
            costs[item] = new_cost
    processed.append(node)
    node = find_lowest_cost_node(costs)

print(costs['finish'])
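The loop above only reports the cheapest total cost; the fourth step, calculating the final path, also needs a record of how each cheapest cost was reached. Below is a minimal sketch of one way to do that, reusing the same graph and find_lowest_cost_node as above; the parents table is an addition for illustration, seeded with the start node's neighbors.

# Sketch (assumption): rerun the loop while also recording each node's
# cheapest-known predecessor, then walk backwards from finish to start.
parents = {'a': 'start', 'b': 'start', 'finish': None}
costs = {'a': 6, 'b': 2, 'finish': float('inf')}
processed = []

node = find_lowest_cost_node(costs)
while node:
    neighbors = graph[node]
    for item in neighbors:
        new_cost = costs[node] + neighbors[item]
        if new_cost < costs[item]:
            costs[item] = new_cost
            parents[item] = node  # remember where this cheaper cost came from
    processed.append(node)
    node = find_lowest_cost_node(costs)

path = ['finish']
while path[-1] != 'start':
    path.append(parents[path[-1]])
print(' -> '.join(reversed(path)))  # start -> b -> a -> finish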
greedy algorithm
states_needed = {'mt', 'wa', 'or', 'id', 'nv', 'ut', 'ca', 'az'}

stations = {}
stations['kone'] = {'id', 'nv', 'ut'}
stations['ktwo'] = {'wa', 'id', 'mt'}
stations['kthree'] = {'or', 'nv', 'ca'}
stations['kfour'] = {'nv', 'ut'}
stations['kfive'] = {'ca', 'az'}

final_stations = set()
while states_needed:
    # Greedy choice: on each pass, pick the station covering the most uncovered states.
    best_station = None
    states_covered = set()
    for station, states_for_station in stations.items():
        covered = states_needed & states_for_station
        if len(covered) > len(states_covered):
            best_station = station
            states_covered = covered
    final_stations.add(best_station)
    states_needed -= states_covered

print(final_stations)
dynamic programming
- Dynamic programming is useful when you’re trying to optimize something given a constraint.
- You can use dynamic programming when the problem can be broken into discrete subproblems.
- Every dynamic-programming solution involves a grid (see the knapsack-grid sketch after this list).
- The values in the cells are usually what you’re trying to optimize.
- Each cell is a subproblem, so think about how you can divide your problem into subproblems.
- There’s no single formula for calculating a dynamic-programming solution.
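As a concrete illustration of the grid idea, here is a minimal sketch of the classic 0/1 knapsack problem; the item names, weights, values, and the 4 lb capacity are made-up for the example. Each cell cell[i][w] is a subproblem: the best value achievable using only the first i items with capacity w, reusing the answers already stored in row i - 1.

# Sketch (assumption): items and capacity are invented for illustration.
# cell[i][w] = best total value using the first i items with capacity w.
items = [('guitar', 1, 1500), ('stereo', 4, 3000), ('laptop', 3, 2000)]
capacity = 4

cell = [[0] * (capacity + 1) for _ in range(len(items) + 1)]
for i in range(1, len(items) + 1):
    name, weight, value = items[i - 1]
    for w in range(1, capacity + 1):
        without_item = cell[i - 1][w]  # best value if this item is skipped
        with_item = 0
        if weight <= w:
            with_item = value + cell[i - 1][w - weight]  # take it, fill the rest
        cell[i][w] = max(without_item, with_item)

print(cell[len(items)][capacity])  # 3500: the guitar and laptop fit in 4 lb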