Flex #2

Open · wants to merge 3 commits into master
8 changes: 8 additions & 0 deletions .idea/Hash-Tables.iml

6 changes: 6 additions & 0 deletions .idea/inspectionProfiles/profiles_settings.xml

4 changes: 4 additions & 0 deletions .idea/misc.xml

8 changes: 8 additions & 0 deletions .idea/modules.xml

6 changes: 6 additions & 0 deletions .idea/vcs.xml

76 changes: 76 additions & 0 deletions .idea/workspace.xml

23 changes: 23 additions & 0 deletions src/chains.py
@@ -0,0 +1,23 @@
import random

def longest_linked_list_chain(keys, buckets, loops=10):
    """Roll `keys` random keys into `buckets` buckets and report the longest chain."""

    for _ in range(loops):
        # a dict is Python's own implementation of a hash table
        key_counts = {}
        for i in range(buckets):
            key_counts[i] = 0
        for _ in range(keys):
            random_key = str(random.random())
            hash_index = hash(random_key) % buckets
            key_counts[hash_index] += 1

        largest_number = 0
        for key in key_counts:
            if key_counts[key] > largest_number:
                largest_number = key_counts[key]

        print(f"Longest linked list chain for {keys} keys in {buckets} buckets (load factor: {keys/buckets:.2f}): {largest_number}")

longest_linked_list_chain(5, 100, 5)
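
Note: the longest chain grows with the load factor (keys / buckets). A couple of extra calls illustrate this; these runs are illustrative and not part of the original commit:

# load factor 0.05: almost every occupied bucket holds a single key
longest_linked_list_chain(5, 100, 5)
# load factor 10.0: the longest chain lands noticeably above the average of 10 keys per bucket
longest_linked_list_chain(1000, 100, 5)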
24 changes: 24 additions & 0 deletions src/collisions.py
@@ -0,0 +1,24 @@
import random

def how_many_before_collisions(buckets, loops=1):
    # roll random hash indexes into `buckets` buckets and print how many
    # rolls happen before the first collision; repeat `loops` times

    for _ in range(loops):
        tries = 0
        tried = set()

        while True:
            random_key = str(random.random())
            hash_index = hash(random_key) % buckets
            if hash_index not in tried:
                tried.add(hash_index)
                tries += 1
            else:
                # we have a collision
                break
        # 1-decimal format
        print(f"{buckets} buckets, {tries} hashes before collision. ({tries/buckets * 100:.1f}%)")

how_many_before_collisions(1000, 1)
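
For context, what this script measures is the birthday problem: with `buckets` equally likely slots, the expected number of inserts before the first collision is roughly sqrt(pi * buckets / 2). A small sketch of that estimate (a hypothetical helper, not part of the commit):

import math

def expected_tries_before_collision(buckets):
    # birthday-problem approximation for uniformly random hash indexes
    return math.sqrt(math.pi * buckets / 2)

print(expected_tries_before_collision(1000))  # about 39.6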
42 changes: 42 additions & 0 deletions src/dynamic_array.py
@@ -0,0 +1,42 @@
class DynamicArray:
    def __init__(self, capacity):
        self.capacity = capacity
        self.count = 0
        self.storage = [None] * self.capacity

    def insert(self, index, value):
        # 2nd - make sure we have open space
        if self.count >= self.capacity:
            self.double_size()
        # 3rd - make sure index is in range
        if index > self.count:
            print("error: index out of range")
            return

        # 4th - shift everything over: start with the last element
        # and move each one slot to the right
        for i in range(self.count, index, -1):
            self.storage[i] = self.storage[i - 1]

        # 1st - insert our value
        self.storage[index] = value
        self.count += 1

    def append(self, value):
        self.insert(self.count, value)

    def double_size(self):
        self.capacity *= 2
        new_storage = [None] * self.capacity
        # copy the existing elements into the larger backing array
        for i in range(self.count):
            new_storage[i] = self.storage[i]
        self.storage = new_storage


my_array = DynamicArray(4)
my_array.insert(0, 1)
my_array.insert(0, 2)
my_array.insert(1, 3)
my_array.insert(3, 4)
my_array.insert(0, 5)
my_array.append(20)
print(my_array.storage)
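
For reference, with the double_size fix above (old elements copied into the larger backing array), the demo ends with six live elements in an eight-slot array. Expected final state, assuming those fixes:

# expected output of the print above:
# [5, 2, 3, 1, 4, 20, None, None]   (count == 6, capacity == 8)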
26 changes: 26 additions & 0 deletions src/hashes.py
@@ -0,0 +1,26 @@
import hashlib

n = 10
# the b prefix turns the literal into a bytes object
key = b"string"
# same as above
key2 = "string".encode()
key3 = b"lunchtime"


index1 = hash(key) % 8
index2 = hash(key2) % 8
index3 = hash(key3) % 8
print(index1)
print(index2)
print(index3)

# for i in range(n):
#     print(hash(key))
#     print(hashlib.sha256(key).hexdigest())
#
# for i in range(n):
#     print(hash(key))
#
# for i in range(n):
#     print(hash(key2))
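
Worth noting: Python's built-in hash() for str and bytes is salted per interpreter process (see PYTHONHASHSEED), so the indexes above change from run to run, whereas hashlib.sha256 is deterministic. A small comparison, illustrative and not part of the commit:

import hashlib

key = b"string"
# stable across runs and machines:
print(int(hashlib.sha256(key).hexdigest(), 16) % 8)
# varies between interpreter runs unless PYTHONHASHSEED is fixed:
print(hash(key) % 8)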
36 changes: 30 additions & 6 deletions src/hashtable.py
@@ -1,6 +1,7 @@
import hashlib
# '''
# Linked List hash table key/value pair
# '''
# Singly linked list node -- LinkedPair
class LinkedPair:
def __init__(self, key, value):
self.key = key
@@ -23,7 +24,7 @@ def _hash(self, key):

You may replace the Python hash with DJB2 as a stretch goal.
'''
return hash(key)
        # convert the digest to an int so _hash_mod can take it modulo the capacity
        return int(hashlib.sha256(key.encode()).hexdigest(), 16)


def _hash_djb2(self, key):
@@ -51,7 +52,12 @@ def insert(self, key, value):

Fill this in.
'''
pass
        index = self._hash_mod(key)

        if self.storage[index] is not None:
            print("error: key in use")
        else:
            # store the full pair so the key is still available for retrieve/resize
            self.storage[index] = LinkedPair(key, value)



@@ -63,7 +69,12 @@ def remove(self, key):

Fill this in.
'''
pass
        index = self._hash_mod(key)

        if self.storage[index] is not None:
            self.storage[index] = None
        else:
            print("warning: key not found")


def retrieve(self, key):
@@ -74,7 +85,10 @@ def retrieve(self, key):

Fill this in.
'''
pass
        index = self._hash_mod(key)
        pair = self.storage[index]
        # returning None for a missing key still meets the spec
        return pair.value if pair is not None else None



def resize(self):
Expand All @@ -84,7 +98,17 @@ def resize(self):

Fill this in.
'''
pass


        old_storage = self.storage.copy()
        self.capacity = self.capacity * 2
        self.storage = [None] * self.capacity

        # re-hash every stored pair into the larger storage
        for pair in old_storage:
            if pair is not None:
                self.insert(pair.key, pair.value)






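A quick smoke test for the methods filled in above; the HashTable(capacity) constructor signature and the _hash_mod helper are assumed from the starter file, which sits in the collapsed part of this diff:

ht = HashTable(8)
ht.insert("line_1", "Tiny hash table")
print(ht.retrieve("line_1"))   # Tiny hash table
ht.resize()                    # capacity 8 -> 16, stored pairs are re-hashed
print(ht.retrieve("line_1"))   # still retrievable after the resize
ht.remove("line_1")
print(ht.retrieve("line_1"))   # None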