# (Pastebin sign-up banner removed — page chrome scraped along with this paste.)
# Smart Learning [SmartLearning] (c) (r) (tm)
# Copyright Dean Van Greunen 2018 to infinity, All Rights Reserved.
# Smart Learning is an Artificial Intelligence system designed and inspired by:
# - The Human Brain
# - Deep learning systems
# - The MatLab programming language
# - The JavaScript programming language, with the server-side execution design of Node.js
# - The Go programming language
# Author: Dean Van Greunen
# Email: deanvg9000@gmail.com
# 027 885 5371 (for South Africa local calls only)
# +27 72 885 5371 (for international calls)
- class SmartLearning:
- #################
- # NN class Code # (this is a SmartLearning NN)
- #################
- # Neural Network, Simple and elegant, unconventional,
- # however it is:
- # - organized
- # - well formated
- # - easy to (while running):
- # - import
- # - export
- # - modify
- # - supports:
- # - Unbounded distance between neurons
- # - Unbounded hooks (neurons, talking to other neurons, more just means more processing) [only allowed math ops are: + - / *], if more complicated functions are need
- # then that means it will be like such,
- # [
- # {
- # "a": {
- # "b":"a+1",
- # "c":"b*0.90"
- # },
- # "d": {
- # "e":"c+1",
- # "f":"e-0.90"
- # },
- # },
- # ]
- # This NN will take a single node input from "a" and get our output for the required f in this example the value can be anything and it will evaluate to:
- # ((((a+1)*0.90)+1)-0.90)
- # let a = 2:
- # ((((2+1)*0.90)+1)-0.90) = ((((3)*0.90)+1)-0.90) = ((2.7+1)-0.90) = (3.7 - 0.90) = (2.8) = 2.8
- # after x time, it could be optimized by access to:
- # [
- # {
- # "a": {
- # "f":"(((a+1)*0.90)+1)-0.90"
- # },
- # },
- # ]
- # this access time would go from, 8 node accesses to 2 which is
- class NN:
- # format of
- # NN = [{"x": {"y"}}] or [{"x": {"y":"y+1", "y+n":"y+n*2.90"}}]
- # NN = [
- # {"x": {"y":"x*0.87"}}, # maps x to y which is mapped by (x * 0.87)
- # {"x+1": {"y":"x*0.67"}} # maps x+1 to y (not math but the Xn var...) which is mapped by (x * 0.67) as a math op using parent var...
- # ]
- # or
- # [
- # {"x": {"y":"y+1", "y+n":"y*0.90"},
- # {"x+1": {"y":"y+1", "y+n":"y*1.09"}} # maps x+1 to y (not math but the Xn var...)
- # ]
- working_neurons = [] # Working Memory, always optimized for performance, has pointers to memory_neurons as mem_x or mem_y, name for x and y are not relevant and can change,
- # as it is only a mathematical function.
- hard_memory_neurons = [] # Memory, not optimized, when saving the working neurons it is then un-optimized and then added to memory.
- def reset(self): # good for wiping / clear working memory, a.k.a temp memory, it will reset to what it was just before "waking up"
- self.neurons = [] # Clears ALL Neurons In Memory, a.k.a "Black Out"
- def wipe(self): # WARNING HARD RESET, MAKES A COMPLETELY EMPTY NN, like a brand new NN, useful if NN has gone rouge.
- self.hard_memory_neurons = [] # Clears ALl Neurons in Old
- self.reset()
- def map(self, n_x, n_y, steps=1): # Solves for Y by X using brute force in steps, if set to 1, it will do a initial rough, y = x from current memory.
- # Note with this method "learning" takes as long as a human would with the exception of speed which an electronic computing device would provide
- # Over what a biological computing device would perform at (the brain, human or not)
- # TODO: Map Properly, as [X->Y]....Must perform a search, each output is mapped to its unique input, etc... and any math logic is re-used
- pass # This mapping function is not available in this version of the code, please contact deanvg9000@gmail.com for help or support or donations for this project
- def optimize(): # this optimizes memory into working memory.
- pass # This optimiz(e/ing) function is not available in this version of the code, please contact deanvg9000@gmail.com for help or support or donations for this project
- def save(self, soft_save=true, hard_save=true, path="", filename="default.pyai-nn"):
- # Soft Load, loads only working memory.
- if soft_save:
- self.soft_save(path, filename)
- # Hard Load, Loads
- if hard_save:
- self.hard_save(path, filename)
- def soft_load(self, path="", filename="default.pyai-nn"):
- # Loads Optimized data
- f_mem_opt_data = open(path + filename + "-opt", "w+")
- f_mem_opt_data.write(toJSON(self.working_neurons))
- f_mem_opt_data.close()
- def hard_load(self, path="", filename="default.pyai-nn"):
- # Loads Complete UnOptimized Data
- f_mem_data = open(path + filename + "-mem", "w+")
- f_mem_data.write(toJSON(self.hard_memory_neurons))
- f_mem_data.close()
- def load(self, soft_load=true, hard_load=true, path="", filename="default.pyai-nn"):
- # Soft Load, loads only working memory.
- if soft_load:
- self.soft_load(path, filename)
- # Hard Load, Loads
- if hard_load:
- self.hard_load(path, filename)
- def soft_load(self, path="", filename="default.pyai-nn"):
- # Loads Optimized data
- f_mem_opt_data = open(path + filename + "-opt", "w+")
- mem_opt_data = f_mem_opt_data.read()
- f_mem_opt_data.close()
- self.working_neurons = fromJSON(mem_opt_data)
- def hard_load(self, path="", filename="default.pyai-nn"):
- # Loads Complete UnOptimized Data
- f_mem_data = open(path + filename + "-mem", "w+")
- mem_data = f_mem_data.read()
- f_mem_data.close()
- self.hard_memory_neurons = fromJSON(mem_data)
- def smartlearn(self):
- pass
- ####################
- # EVent class Code #
- ####################
- # Event, Clever way to exchange data and format it without actually changing it, just the way it is perceived.
- class Event:
- chains = [] # All events to chain with this event
- func = None # function to call after formating is complete
- formatting = None # formatting function to call, passes in data, expects data
- def init(self, func=None, formatting=None, chains=[]): # Events can be empty
- # This creates a data formating matrix, which takes the func to be called and applies the formatting as matrix which converts the data, events can be chained.
- if func:
- self.func = func
- else:
- self.func = self.ret
- if formatting:
- self.formatting = formatting
- else:
- self.formatting = self.ret
- self.chains = chains
- def bindEvent(self, event): # binds an event to chain (stack first in, first called)
- self.chains[] = event
- def call(self, data):
- if len(self.chains) >= 1: # check if there are items in the chain
- for ev in self.chains: # loop through events in chain
- if type(ev) is SmartLearning().Event(): # check if item in chain is an event else skip it
- ev.call(data) # if any Subscribing Events have chains they will all be called as well.
- self.func(self.formatting(data)) # Data is not destroyed
- def ret(self, x):
- return x
- #####################
- # Input class Code #
- #####################
- # Input which has a NN, which monitors the current environment
- class Input:
- parent_events = SmartLearning().Event()
- stream = None
- queue = []
- def init(self):
- startThread(self.notifier)
- def setStream(self, stream):
- self.stream = stream
- def bindEvent(self, event):
- self.parent_events.bindEvent(event)
- def notifier(self):
- while self.stream:
- if self.stream.canFetch():
- self.queue = self.stream.fetch()
- for i,q in self.queue:
- self.stream.pop(q)
- self.queue[i] = None
- #####################
- # Output class Code #
- #####################
- # Output which has a NN, affects the current environment
- class Output:
- parent_events = SmartLearning().Event()
- stream = None
- queue = []
- def init(self):
- startThread(self.sender)
- def setStream(self, stream):
- self.stream = stream
- def bindEvent(self, event):
- self.parent_events.bindEvent(event)
- def sender(self):
- while self.stream:
- if self.stream.canPush():
- for i,q in self.queue:
- self.stream.push(q)
- self.queue[i] = None
- ##########################
- # Environment class Code #
- ##########################
- # Environment contains input and output which both have NNs
- class Environment:
- io = None
- parent_events = SmartLearning().Event()
- ev_notify = SmartLearning().Event()
- def init(self):
- self.io = {
- "i": SmartLearning().Input(),
- "o": SmartLearning().Output()
- }
- self.ev_notify.func = self.notify
- self.io["i"].init()
- self.io["o"].init()
- self.io["i"].bindEvent(self.ev_notify)
- self.io["o"].bindEvent(self.ev_notify)
- def bindEvent(self, event):
- self.parent_events.bindEvent(event)
- def notify(self, data):
- self.parent_events.call(data)
- #########################
- # Processing class Code #
- #########################
- # Processor contains the environment, which contains input and output which both have NNs
- class Processing:
- nn = None
- environment = None
- parent_events = None
- ev_notify = None
- def init(self):
- self.nn = SmartLearning().NN()
- self.environment = SmartLearning().Environment()
- self.parent_events = SmartLearning().Event()
- self.ev_notify = SmartLearning().Event()
- self.environment.init()
- def bindIO(self, i, o):
- self.environment.io["i"].setStream(i)
- self.environment.io["o"].setStream(o)
- def bindEvent(self, event):
- self.parent_events.bindEvent(event)
- def notify(self, data):
- self.parent_events.call(data)
- ###################
- # Main class Code #
- ###################
- processor = None
- parent_events = None
- ev_notify = None
- def init(self):
- self.processor = SmartLearning().Processing()
- self.parent_events = SmartLearning().Event()
- self.ev_notify = SmartLearning().Event()
- self.processor.init()
- def bindEvent(self, event):
- self.parent_events.bindEvent(event)
- def notify(self, data):
- self.parent_events.call(data)
- def loadStreams(self, i_stream, o_stream):
- self.processor.bindIO(i_stream, o_stream)
- def boot(self): # loads hard memory only (usually done a few times during AI Up time or after hitting the kill switch)
- self.processor.hard_memory_neurons = []
- self.processor.load(soft_load=False, hard_load=False)
- self.wake()
- def wake(self): # loads working memory only.
- self.processor.working_neurons = []
- self.processor.load(soft_load=True, hard_load=False)
- def sleep(self): # slowly pauses tasks one by one from least to most important,
- # and then de-optimizes working memory and store in into hard memory,
- # then saves hard memory and clears optimied memory
- self.processor.optimize()
- self.processor.save(soft_save=True, hard_save=False)
- self.processor.working_neurons = []
- def shutdown(self): # runs sleep and then clears both hard and working memory after saving, then it shutdown the AI completely.
- # useful for debugging and going through the "AI's brain" data files
- self.sleep()
- self.processor.save(soft_save=False, hard_save=True)
- self.processor.hard_memory_neurons = []
- quit()
- pass
- def kill(self): # Crashes AI, good kill switch for a rouge AI
- quit()
- #############################################
- # USAGE DEMO #
- #############################################
- # Note Run the following in a different thread, while keeping access to sl_ai
- # Create SmartLearning AI System
- sl_ai = SmartLearning()
- # Initialize SubSystems
- sl_ai.init()
- # Load The World Information Stream (o_stream)
- # AI Actor/Agent Control Stream (i_stream)
- sl_ai.loadStreams(i_stream=StreamObject, o_stream=StreamObject)
- # Start Up AI for the first time (Booting)
- sl_ai.boot()
- # every 3 to 9 hours do a
- # sl_ai.sleep()
- # sl_ai.wake()
- # The AI should be functional now. it will take rough 9 to 48 human months to learn how to speak, and walk.
- # after 20 years roughly it will be able to program a version of it's self.
# (Pastebin comment-form footer removed — page chrome scraped along with this paste.)