Mirror of https://github.com/onyx-and-iris/grokking-algorithms.git, synced 2025-04-20 04:23:47 +01:00
Compare commits
No commits in common. "d552050f7ec7bfaf6f27be18c5f6ec91a67fd84f" and "f2d23203aee7f301dfd605f03302825c422adf47" have entirely different histories.
@@ -1,3 +0,0 @@
# Approximation algorithm

- Easy to write, fast to run, and useful for obtaining approximate solutions to NP-hard problems (see the greedy set-cover sketch below).
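A common first example is greedy set cover: repeatedly pick the set that covers the most still-uncovered items. A minimal sketch, with made-up radio-station/state data purely for illustration:

```python
# Greedy set cover: an approximation algorithm for an NP-hard problem.
# At each step, pick the station covering the most still-uncovered states.
# The station/state data below is made up purely for illustration.

states_needed = {"mt", "wa", "or", "id", "nv", "ut", "ca", "az"}

stations = {
    "kone": {"id", "nv", "ut"},
    "ktwo": {"wa", "id", "mt"},
    "kthree": {"or", "nv", "ca"},
    "kfour": {"nv", "ut"},
    "kfive": {"ca", "az"},
}

final_stations = set()
while states_needed:
    best_station = None
    states_covered = set()
    for station, states in stations.items():
        covered = states_needed & states
        if len(covered) > len(states_covered):
            best_station = station
            states_covered = covered
    states_needed -= states_covered
    final_stations.add(best_station)

print(final_stations)  # e.g. {'kone', 'ktwo', 'kthree', 'kfive'}
```

The greedy choice is not guaranteed to be the smallest possible cover, but it avoids checking every possible subset of stations, which is what makes the exact problem intractable.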
@@ -1,6 +0,0 @@
# Dynamic Programming

A programming technique for decomposing a problem into smaller discrete subproblems.

- Useful when trying to optimize something given a constraint.
- Example: choosing items for a knapsack of size W so that the total value is greatest (see the sketch below).
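As a sketch of the knapsack example, here is a minimal bottom-up 0/1 knapsack implementation; the item weights and values are made up for illustration:

```python
# 0/1 knapsack via dynamic programming: best[c] holds the greatest value
# achievable with capacity c using the items considered so far.
# Item weights and values below are made up for illustration.

def knapsack(items, capacity):
    """items is a list of (weight, value) pairs; capacity is the knapsack size W."""
    best = [0] * (capacity + 1)
    for weight, value in items:
        # Iterate capacities downwards so each item is used at most once.
        for c in range(capacity, weight - 1, -1):
            best[c] = max(best[c], best[c - weight] + value)
    return best[capacity]


items = [(1, 1500), (3, 2000), (4, 3000)]  # (weight, value): guitar, stereo, laptop
print(knapsack(items, 4))  # 3500: guitar + stereo beats the laptop alone
```

Each cell `best[c]` depends only on previously computed subproblems, which is what makes the decomposition into smaller discrete subproblems work.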
@@ -1,9 +0,0 @@
# K-Nearest Neighbours

Useful for classification, regression and feature extraction. By examining a data point against its K nearest neighbours we can:

- categorize into a group
- predict responses
- convert the item into a list of features

A good starting point for machine learning.
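A minimal classification sketch along those lines, assuming Euclidean distance over hand-picked feature vectors and a majority vote among the K closest labelled points (the fruit data is invented for illustration):

```python
# K-nearest neighbours classification: represent each item as a feature vector,
# measure its distance to labelled examples, and take a majority vote among
# the K closest. The fruit data below is made up for illustration.

from collections import Counter
from math import dist


def knn_classify(sample, labelled_points, k=3):
    """labelled_points is a list of (features, label) pairs."""
    neighbours = sorted(labelled_points, key=lambda p: dist(sample, p[0]))[:k]
    votes = Counter(label for _, label in neighbours)
    return votes.most_common(1)[0][0]


# Features: (size, redness)
fruit = [
    ((8.0, 0.6), "grapefruit"),
    ((7.5, 0.7), "grapefruit"),
    ((7.8, 0.65), "grapefruit"),
    ((4.0, 0.3), "orange"),
    ((4.2, 0.35), "orange"),
    ((3.9, 0.25), "orange"),
]

print(knn_classify((4.1, 0.3), fruit))  # orange
```

The same neighbour lookup supports regression (average the neighbours' values instead of voting), and the feature vectors themselves are the "list of features" mentioned above.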
@@ -1,5 +1,6 @@
 import logging
 import random
+import time

 logging.basicConfig(level=logging.DEBUG)
 logger = logging.getLogger(__name__)
@@ -32,10 +33,11 @@ SAMPLE_SIZE = 1000
 numbers = random.sample(range(LOWER, UPPER), SAMPLE_SIZE)
 numbers.sort()

+start = time.time()
 result = None
 while result is None:
     guess = random.randrange(LOWER, UPPER)
     logger.debug(f"guess: {guess}")
     result = binary_search(numbers, 0, len(numbers) - 1, guess)

-print(f"Found {guess} at index {result}.")
+print(f"Found {guess} at index {result}. Running time {time.time() - start}")
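The `binary_search` function called in this script lies outside the hunk shown above; a minimal recursive sketch matching the call signature `binary_search(numbers, low, high, guess)` and returning `None` on a miss (which is what keeps the `while result is None` loop retrying with new guesses) might look like this:

```python
# Not part of the diff above: a hypothetical recursive binary_search matching
# the call signature used in the script. Returns the target's index, or None
# if the target is absent from the sorted list.

def binary_search(items, low, high, target):
    if low > high:
        return None
    mid = (low + high) // 2
    if items[mid] == target:
        return mid
    if items[mid] < target:
        return binary_search(items, mid + 1, high, target)
    return binary_search(items, low, mid - 1, target)
```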
@@ -1,5 +0,0 @@
# Shortest path for weighted graphs (edges with associated costs)

- Dijkstra's algorithm works when all weights are non-negative.
- If there are negative weights, use Bellman-Ford.
- A priority queue backed by a min-heap beats repeatedly scanning a list for the cheapest node (see the sketch below).
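A minimal sketch of that approach, using Python's `heapq` module as the min-heap behind the priority queue (the graph and its non-negative weights are made up for illustration):

```python
# Dijkstra's algorithm with a min-heap priority queue (heapq): pop the
# cheapest known node, relax its outgoing edges, and push improved costs.
# The example graph is made up for illustration; all weights are non-negative.

import heapq


def dijkstra(graph, start):
    """graph maps node -> {neighbour: edge_cost}; returns the cheapest cost to every reachable node."""
    costs = {start: 0}
    heap = [(0, start)]
    while heap:
        cost, node = heapq.heappop(heap)
        if cost > costs.get(node, float("inf")):
            continue  # stale heap entry; a cheaper path was already found
        for neighbour, weight in graph[node].items():
            new_cost = cost + weight
            if new_cost < costs.get(neighbour, float("inf")):
                costs[neighbour] = new_cost
                heapq.heappush(heap, (new_cost, neighbour))
    return costs


graph = {
    "start": {"a": 6, "b": 2},
    "a": {"fin": 1},
    "b": {"a": 3, "fin": 5},
    "fin": {},
}
print(dijkstra(graph, "start"))  # {'start': 0, 'a': 5, 'b': 2, 'fin': 6}
```

With the heap, extracting the next cheapest node costs O(log n) instead of the O(n) scan a find-lowest-cost function over a plain list would need.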