Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- //=============================DATA TYPE LOGIC================================
- // schema logic
- //-- generate events in a format of a schema but also data so it can be used as both
- //-- we assume we have a starting point of some "vertex" schema definition
- {
- "LH": {
- "key": "nonNegativeInteger",
- "runtime": {
- "ref": "xxx",
- "params": {}
- }
- }},
- {
- "$schema": "http://json-schema.org/draft-04/schema#",
- "definitions": {},
- "id": "http://example.com/example.json",
- "properties": {
- "LH": {
- "id": "/properties/LH",
- "properties": {
- "key": {
- "id": "/properties/LH/properties/key",
- "type": "string"
- },
- "runtime": {
- "id": "/properties/LH/properties/runtime",
- "properties": {
- "params": {
- "id": "/properties/LH/properties/runtime/properties/params",
- "properties": {},
- "type": "object"
- },
- "ref": {
- "id": "/properties/LH/properties/runtime/properties/ref",
- "type": "string"
- }
- },
- "type": "object"
- }
- },
- "type": "object"
- }
- },
- "type": "object"
- }
- {
- "type": "object",
- "children": [
- {
- "type": "property",
- "key": {
- "type": "identifier",
- "value": "LH"
- },
- "value": {
- "type": "object",
- "children": [
- {
- "type": "property",
- "key": {
- "type": "identifier",
- "value": "key"
- },
- "value": {
- "type": "literal",
- "value": "nonNegativeInteger",
- "rawValue": "\"nonNegativeInteger\""
- }
- },
- {
- "type": "property",
- "key": {
- "type": "identifier",
- "value": "runtime"
- },
- "value": {
- "type": "object",
- "children": [
- {
- "type": "property",
- "key": {
- "type": "identifier",
- "value": "ref"
- },
- "value": {
- "type": "literal",
- "value": "xxx",
- "rawValue": "\"xxx\""
- }
- },
- {
- "type": "property",
- "key": {
- "type": "identifier",
- "value": "params"
- },
- "value": {
- "type": "object",
- "children": []
- }
- }
- ]
- }
- }
- ]
- }
- }
- ]
- }
- JSON Original
- // generate schema for our JSON
- var schemagraph = [[1, 'ref', 'literal'], [2, 'key', 'literal'], [3, 'LH', 'object']]
- subject, predicate, object
- assignId to initial stream: [[1,1,1, dateTime],[],[]] -- > each new schema item will increment the concept
- Concept Logical Levels: (as we move the subject/predicate/object - we just shift the numbers)
- -- no assert -- [[0,1,1, dateTime],[],[], TwitterLikeInt]
- -- concept 1 -- [[1,1,1, dateTime],[],[], TwitterLikeInt]
- -- concept 2 -- [[2,1,1, dateTime],[],[], TwitterLikeInt]... [[4,1,1, dateTime],[],[], TwitterLikeInt] ... [[1,1,1, dateTime],[],[], TwitterLikeInt]
- -- concept 3 -- [[3,1,1, dateTime],[],[], TwitterLikeInt]
- -- concept 4 -- [[4,1,1, dateTime],[],[], TwitterLikeInt]
- -- concept 2.0 -- [[2,1,1],[[2,1,1, dateTime],[],[], TwitterLikeInt]]
- -- concept 2.1 -- [[1,2,1],[[2,1,1, dateTime],[],[], TwitterLikeInt]]
- Context Traversal:
- -- concept 2.1 -- [[[1,2,1],[1,2,1]],[[2,1,1, dateTime],[],[], TwitterLikeInt]]
- -- Declare an item
- -- Assert an item
- [axiom, [class]]
- axiom - 2,1,1 -- itemId, subclass level, ontologyId -- interpretation
- base class - 2,1,1 -- individual cardinality, concept, ontologyId -- here is where the individual is grounded
- Individuals - before assertion:
- -- level 0 -- [[0,1,1],[[0,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 1 -- [[0,1,1],[[0,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 2 -- [[0,1,1],[[0,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 3 -- [[0,1,1],[[0,1,1, dateTime],[],[], TwitterLikeInt]]
- Individuals - after assertion (to the same stream): (we need to assert as a base class, subclasses get a new item created)
- -- level 0 -- [[1,1,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]] [ predicate section ] [ object section ]
- -- level 1 -- [[1,1,1],[[2,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 2 -- [[1,2,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 3 -- [[1,3,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]]
- Individuals - subclass assertion:
- -- level 0 -- [[1,1,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]] [ predicate section ] [ object section ]
- -- level 1 -- [[1,1,1],[[1,2,1, dateTime],[],[], TwitterLikeInt]]
- -- level 2 -- [[1,2,1],[[1,3,1, dateTime],[],[], TwitterLikeInt]]
- -- level 3 -- [[1,3,1],[[1,4,1, dateTime],[],[], TwitterLikeInt]]
- Individuals - after assertion - DIFFERENT stream:
- -- level 0 -- [[1,1,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]] [ predicate section ] [ object section ]
- -- level 1 -- [[1,1,1],[[2,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 2 -- [[1,2,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]]
- -- level 3 -- [[1,3,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]]
- schema subject shift UP logical hierarchy: [1,5,3] [5,3,9] [3,9,16] ++ NewPredicate ++ old object (inferencing new level) (+ add edge SameAs)
- schema subject shift DOWN logical hierarchy: [1,5,3] [10,1,5] [2,10,1]
- schema predicate shift DOWN logical hierarchy: [1,5,3] [1,5,2] [1,5,1] ++ NewPredicate ++ new objectId (inferencing new connection) (+ add edge SameAs)
- -- in some cases when under conditions we may need to not add SameAs or add "not the same as"
- - how do we draw subclass - we can safely say that every stream has its own schema
- - for other types of
- --
- [[1,3,1],[[1,1,1, dateTime],[],[], TwitterLikeInt]] and __ [1,3,1]__[1,5,2]__[8,3,4] ____ and [[8,3,4],[[1,1,1, dateTime],[],[], TwitterLikeInt]]
- schema:
- subject: objectFullPath
- predicate: 'has' (type: property)
- object: propertyFullPath (value.type: 'literal')
- // Add ast parsing - so we can easily parse JSON
- // === This still executes as Realine non-core code - so we can prep the data for actual upload
- // initialize parent object stream
- // declare objects/properties/functions/schema
- // assert objects/properties/functions/schema
- function JsonObjectDeclaration(parent, element) ( if element.type = "object" then .. run('DeclareState', element.key), ) //these we put in the top object stream
- function JsonObjectAssertion(parent, element) ('here we push individual to specific stream or leave asserted in the parent stream')
- function JsonObjectSchemaAssertion(element) ( 'need to finalize the format')
- // Here we start declaration and assertion process in Realine
- // -- initialize a specific stream (we dump data there in a proper format, but not asserted yet)
- // -- separately upload schema into a different stream
- // -- compose schema and attach mappings to Realine existing schemas - if such exist (on low level, we should have variable type etc, if not, we just create it)
- // -- push the schema to be Realine Ontology schema (as classes, object properties, data properties + mappings to runtime workers etc.)
- // -- here we need to capture relationship such as subclass vs object property etc. (we should be able to see the resulting schema on UI)
- // -- for the above point, we should probably create a JSON decomposition grammar - so we do not have to do the mappings every time
- // -- run assertion process on the properties - we push the value through the pipeline in an iterative cycle (basically - like a business rule - equivalent class)
- // -- asserted classes, data props, etc. are pushed to respective streams (copied multiple times if necessary) - for every Class we create a stream
- // -- we have a UI worker that gets the messages and shows them on a UI Json tree (now we have 3 tree on the page - original, ontology schema, assertion results)
- // -- we can compare the original vs. result and if we have any ambiguities regarding type (multiple assertions etc - we need to mark what is same/different individual, add disjoints etc. )
- // -- we rerun the assertion as many times as needed
- // -- now we should be able to start querying the object, add more individuals to it or expand schema
- // -- we can also take 2-3 more jsons and merge them and return a specific axiom that will create a new stream
- var RealineSchemaState = [[1, 'ref', 'literal'], [2, 'key', 'literal'], [3, 'LH', 'object']]
- function JsonPropertyAssertion(parent, child)
- function PropertyDeclaration(value) { var prop = [1, dateTime.Now(), ] return message }
- function PropertyAssertion(propertyDeclaration) { var message = [] return message}
- Property('dog') = value
- function stream(functionDeclaration, params[]) { var instance = [] return instance }
- arguments = [Property("ref", ...), ]
- var state = stream('objectAssert', arguments)
- function DeclareState() { return []}
- function router(element) { run('runJsonObjectSchemaAssertion', element) }
- // Json processing
- // 0. Parse to AST
- // 1. Generate schema
- // 2. Upload data
- // runtime schema
- -- argument
- -- parameter
- -- function
- -- function.name
- -- value
- -- functionDefinition // this is what is described in the code -- intersect, declare
- // function definition
- function abc(params) { run(....) return value}
- abc(argument) = value
- //
- // schema[] schema.push(functionDefinition) ==> value = run(function.name, argument) // our worker
- // data = data.push(value)
- // abstraction level --> schemaGraph
- // grammar item for the schema graph --> nodeType == property
- // and then we add a specific item
- schema: [s(id)(SchemaItem, type=property), p-(has), o-(Params: type:literal), v=key.value ("ref" as name)] // (schemaItem, schemaConcept, schemaGraph)
- // when we create individuals - we do not create an explicit edge - instead we use our 3 digit ID
- data: [s(id, schemaItemId, schemaId), p(has??literal), o(id, value--xxx) ] // here we also need to use ontology logic on high level (data property)
- state: [subject +++ data ] // individuals
- ------------------ ^^^^^ graph logic
- ------------------ ontology logic
- stream: [subject +++ schema] --> filter by schemaid, schemaItemId --> push to state // schema --> filter/assertion --
- --- [s[2,2,2],098980980, [params] [props], pred(params, props), object[state-msg1, state-msg-2,..]]
- --- [s, p, o[state]] == graph == object
- --- schema --> linked to stream subject conceptId/schemaId == ontology class
- --- query --> linked to stream predicate conceptId/schemaId == object property (in schema we have detailed mapping to methods - we need to account for get and put)
- --- object --> message array --> we can use it reconstruct the "data tree"
- --- stream = vertex, state = props
- --- stream predicate link = function -- params, props==arguments (definition link), (input) object (output - (*-generated when processed), 2, 2 ) [props] [params: value="xxx"]
- --- function[predicate] abc(object) { [params] return subject}
- // when we move data from stream to stream --> means we do "assertion"
- // when we add to a specific stream --> we do assertion
- =================================
- -- messages -- props, params [subject, predicate, object] --> state -- vertex+props/params // graph
- state -- [id, [props], [params], label?]
- property --- [subjId, predicate(has), objectId [params], [props=value] ]
- -- streams -- classes[class/object property/data property] --> stream -- object(props/params), objectProperty(props/params), dataProperty(props, params)// ontology
- -- how do we create the schema for graph and ontology
- -- how do we store the data for graph and ontology
- ------------------
- -- what elements do we have in schema
- -- what is the order
- -- what are the params
- -- need to reduce it to non-repeating values
- -- need to be able to take our schema and take apart the data to messages later
- ------------------
- -- take apart data based on the schema
- ------------------
- // schema generation
- [subject (schema), predicate(has), object(node-LH -- key, runtime)]
- // data
// Builds the sample schema/data items, then the graph, state, and ontology
// views over them. Each item is [[sequenceId, SchemaItemId, timestamp], ...].
const SchemaItemId = 2; // each will have their schema when it is created - we need to create schema first
let sequenceId = 1;
let value = ['abc', 'hdhd'];
const property = [[sequenceId++, SchemaItemId, Date.now()], value];
// NOTE(review): the graph-logic aliases below were originally declared *after*
// `input`/`argument` referenced them, which throws a ReferenceError (const
// temporal dead zone). The two aliases needed early are hoisted here; their
// values are unchanged because `property` and `object` are never reassigned
// before the original alias site.
const vertexProperty = property;
let parameter = [[sequenceId++, SchemaItemId, Date.now()], []];
let object = [[sequenceId++, SchemaItemId, Date.now()], [property, property], []];
const vertex = object;
let functionDefinition = [object, [[sequenceId++, SchemaItemId, Date.now()], [], [parameter, parameter]]]; // edge in schema == function
const message = [object, [[sequenceId++, SchemaItemId, Date.now()], [], [parameter, parameter]], object]; // edge in data
const input = [[sequenceId++, SchemaItemId, Date.now()], [vertexProperty, vertexProperty], []];
let output = message;
const functionBody = ''; // workflow = array of schema items
parameter = [[[sequenceId++, SchemaItemId, Date.now()], []], [[sequenceId++, SchemaItemId, Date.now()], []]]; /// edge params style
const functionName = 'text'; // from schema
functionDefinition = [functionName, functionBody, parameter];
const argument = [vertex]; // [[sequenceId++, SchemaItemId, Date.now()], [vertexProperty, vertexProperty], []];
//output = run(functionDefinition, argument, input); // == [[sequenceId++, SchemaItemId, Date.now()], [vertexProperty, vertexProperty], []];
//==================== GRAPH LOGIC ============================
const edgeParameter = parameter; // captures the reassigned edge-params-style value, as in the original order
const edgeSchema = functionDefinition; // captures the reassigned [name, body, params] form
const edgeData = message;
const graph = [[vertex, message], [vertex, message], [vertex, message]];
//================= STATE LOGIC =================================
const subject = vertex;
const predicate = []; // function name linking to functionDefinition???
object = vertex; // no-op in practice (vertex aliases object); kept for parity with the notes
const state = [graph, graph];
const schema = [];
const stream = [state, schema]; // stream is just a function: schema-graph, state-graph ---->>>> state2-graph
//================= ONTOLOGY LOGIC =================================
const ontology = [stream, stream]; // schema of streams --- all logic pretty much the same as above - stream of functions
const classIndividuals = [state, state]; // schema of states -- stream of states
//const agent = [[ontology, classIndividuals], [ontology, classIndividuals]]
- //========================== FUNCTIONAL LOGIC ======================
- // runtime schema
- -- argument
- -- parameter
- -- function
- -- function.name
- -- value
- // event logic
- -- delegate (run, delegate, onAction)
- -- workerUnit (webworkerUnit, runtimeUnit) (class) -- these go per workspace
- -- worker (intersect, sum, save )
- -- supervisor (onWorker???) (this will be a callback)
- -- router (onEvent - contextRouter)
- -- message
- -- event
- -- action
- // state logic
- -- delegateState (persistent)
- -- workerUnit (...)
- --> push first event based on JSON
- --> generate event on UI
- -- Event -> "run" --> delegate --> workerUnit(command) --> worker (declares) --> |result| --> supervisor(asserts) >> ? write to stream : discard >> callsLOnEvent(result) --> Event to Subscribers==delegate
- // command logic
- -- action --> "do" --> onStream(delegate) --> workerUnit(?)....
- ===================================================================================
// assertEvent --> 'call action'
// Entry point for external events; it is subscribed to specific states to get events.
// NOTE(review): `getPlans` is not defined anywhere in view (only `getPlan`,
// singular, appears later) — confirm which name is intended.
function perceptor(message) { // on event
  // on event external
  // message -- save it or process otherwise -- save plans we sent so we can confirm completion
  const actions = getPlans(message); // actions: [[workername, params]]
  delegate(actions); // e.g. 'intersect', 'array1, array2'
}
const conditionsState = []; // beliefs
const delegateState = [];
// Business-logic condition check to decide when to execute an action
// (e.g. it may wait for "validation completed"); brokers the action out
// to a specific worker.
function delegate(action) {
  // get plan or hold the action
  // save actions to delegateState
  // check pre-conditions to fire actions --- intersect with some other state
  // NOTE(review): Array.prototype.intersect is not a built-in — this assumes
  // a custom helper defined elsewhere; confirm.
  const currentActions = delegateState.intersect(conditionsState);
  for (const currentAction of currentActions) {
    // TODO: per-action processing (original pseudocode loop body was empty)
  }
  // here we will be calling worker units, but without millions of callbacks
  // and the need to manage state by hand
  run('functionaName', 'params', supervisor); // internal event
}
const supervisorState = [];
// Checks worker results and 'declares' that an event has happened.
function supervisor(result) { // it checks if the worker results are satisfactory // internal actions
  // if true, save to all states and push a list to the router (on event) as an array
  // NOTE(review): the original pseudocode read `if result good then` — the
  // acceptance predicate is not defined anywhere in view; plain truthiness is
  // a placeholder until the real check is specified.
  if (result) {
    workspace(result); // maybe call it in a loop?
  } else {
    delegate(result); // original pseudocode: delegate(...) — confirm the intended argument
  }
}
const workspaceState = [];
// Manages all states, streams and agents.
function workspace(message) { // it keeps events and manages subscriptions == router
  // we need to get a list of subscribers
  // NOTE(review): original pseudocode was `foreach....(perceptor(message))` —
  // the subscriber list is not defined in view; once it exists, fan the
  // message out to each subscriber's perceptor here.
  perceptor(message); // external event
}
//=================== worker functions =========== // intersect, sum, callApi, render .....
// Generic worker stub; runs in a web worker / service worker / server side.
function worker(state) {
  // NOTE: the original body redeclared `state` as a const, which shadows the
  // parameter and is a SyntaxError in JavaScript — use a distinct local name.
  const localState = [];
  // it can call other functions from code
  // worker is written in code as our helper function
}
// Planner/runtime stubs — the originals had no bodies (invalid JS); empty
// bodies are placeholders until the real implementations are written.
function getPlan() {} // TODO: fetch a plan for a message
function generatePlan() {} // TODO: synthesize a new plan
function run() { return; } // TODO: dispatch a named worker function
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement