Skip to content

Commit

Permalink
Fix for crashing Property pages
Browse files Browse the repository at this point in the history
Tweaks to response content types
  • Loading branch information
Dataliberate committed Feb 21, 2019
1 parent 6bf2051 commit cdc0c5c
Show file tree
Hide file tree
Showing 4 changed files with 93 additions and 61 deletions.
27 changes: 25 additions & 2 deletions apirdfterm.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,7 @@ def loadSubjects(self,pred):

def loadsupers(self):
fullId = toFullId(self.id)
#log.info("checksupers(%s)" % self.id)
#log.info("loadsupers(%s)" % self.id)
if self.ttype == VTerm.CLASS or self.ttype == VTerm.DATATYPE:
sel = "rdfs:subClassOf"
else:
Expand Down Expand Up @@ -387,7 +387,7 @@ def loadsubs(self):
continue
sortedAddUnique(self.subs,sub)

if self.ttype == VTerm.ENUMERATION:
if self.ttype == VTerm.ENUMERATION or self.ttype == VTerm.DATATYPE:
subjects = self.loadSubjects("a") #Enumerationvalues have an Enumeration as a type
for child in subjects:
sub = VTerm._getTerm(str(child))
Expand Down Expand Up @@ -637,7 +637,30 @@ def query(q):
with RDFLIBLOCK:
ret = list(graph.query(q))
return ret

@staticmethod
def term2str(t):
terms = t
if not isinstance(t, list):
terms = [t]
ret = []
for term in terms:
ret.append(str(term))
return ret

@staticmethod
def term2id(t):
terms = t
if not isinstance(t, list):
terms = [t]
ret = []
for term in terms:
ret.append(term.getId())
return ret




#############################################
def toFullId(termId):

Expand Down
6 changes: 0 additions & 6 deletions app.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,6 @@ handlers:
mime_type: text/plain

# To avoid: "Could not guess mimetype for docs/schemaorg.owl. Using application/octet-stream."
- url: /docs/schemaorg.owl
static_files: docs/schemaorg.owl
upload: docs/schemaorg.owl
application_readable: True
mime_type: application/rdf+xml

- url: /docs/schema_org_rdfa.html
static_files: data/schema.rdfa
upload: data/schema.rdfa
Expand Down
91 changes: 46 additions & 45 deletions sdoapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -779,11 +779,12 @@ def BreadCrumbs(self, term):

self.crumbStacks = term.getParentPaths()

if term.isProperty():
cstack.append(VTerm.getTerm("http://schema.org/Property"))
cstack.append(VTerm.getTerm("http://schema.org/Thing"))
elif term.isDataType():
cstack.append(VTerm.getTerm("http://schema.org/DataType"))
for cstack in self.crumbStacks:
if term.isProperty():
cstack.append(VTerm.getTerm("http://schema.org/Property"))
cstack.append(VTerm.getTerm("http://schema.org/Thing"))
elif term.isDataType():
cstack.append(VTerm.getTerm("http://schema.org/DataType"))


enuma = term.isEnumerationValue()
Expand Down Expand Up @@ -942,17 +943,16 @@ def emitClassExtensionSuperclasses (self, cl, layers="core", out=None):
count = 0
if not out:
out = self

buff = StringIO.StringIO()
sc = Unit.GetUnit("rdfs:subClassOf")

#log.info("SUPERS %s" % VTerm.term2str(cl.getSupers()))

for p in GetTargets(sc, cl, ALL_LAYERS):
for p in cl.getSupers():

if inLayer(layers,p):
if not p.isReference() and p.inLayers(layers):
continue

if p.id == "http://www.w3.org/2000/01/rdf-schema#Class": #Special case for "DataType"
p.id = "Class"

sep = ", "
if first:
Expand All @@ -967,7 +967,7 @@ def emitClassExtensionSuperclasses (self, cl, layers="core", out=None):

content = buff.getvalue()
if(len(content) > 0):
if cl.id == "DataType":
if cl.getId() == "DataType":
self.write("<h4>Subclass of:<h4>")
else:
self.write("<h4>Available supertypes defined elsewhere</h4>")
Expand All @@ -976,23 +976,24 @@ def emitClassExtensionSuperclasses (self, cl, layers="core", out=None):
self.write("</ul>")
buff.close()

def emitClassExtensionProperties (self, cl, layers="core", out=None):
if not out:
out = self

buff = StringIO.StringIO()
""" def emitClassExtensionProperties (self, cl, layers="core", out=None):
if not out:
out = self
for p in self.parentStack:
self._ClassExtensionProperties(buff, p, layers=layers)
buff = StringIO.StringIO()
content = buff.getvalue()
if(len(content) > 0):
self.write("<h4>Available properties in extensions</h4>")
self.write("<ul>")
self.write(content)
self.write("</ul>")
buff.close()
for p in self.parentStack:
self._ClassExtensionProperties(buff, p, layers=layers)
content = buff.getvalue()
if(len(content) > 0):
self.write("<h4>Available properties in extensions</h4>")
self.write("<ul>")
self.write(content)
self.write("</ul>")
buff.close()
"""

def _ClassExtensionProperties (self, out, cl, layers="core"):
"""Write out a list of properties not displayed as they are in extensions for a per-type page."""

Expand Down Expand Up @@ -1409,7 +1410,7 @@ def emitExactTermPage(self, term, layers="core"):
self.emitUnitHeaders(term) # writes <h1><table>...
stack = self._removeStackDupes(term.getTermStack())
setAppVar("tableHdr",False)
if term.isClass():
if term.isClass() or term.isDataType():
for p in stack:
self.ClassProperties(p, p==[0], out=self, term=term)
if getAppVar("tableHdr"):
Expand All @@ -1418,9 +1419,9 @@ def emitExactTermPage(self, term, layers="core"):

self.emitClassIncomingProperties(term)

#self.emitClassExtensionSuperclasses(node,layers)
self.emitClassExtensionSuperclasses(term,layers)

#self.emitClassExtensionProperties(p,layers)
#self.emitClassExtensionProperties(p,layers) #Not needed since extension defined properties displayed in main listing

elif term.isProperty():
self.emitAttributeProperties(term)
Expand Down Expand Up @@ -1509,7 +1510,7 @@ def emitAcksAndSources(self,term):
def emitchildren(self,term):
children = term.getSubs()

log.info("CILDREN: %s" % children)
log.info("CHILDREN: %s" % VTerm.term2str(children))

if (len(children) > 0):
buff = StringIO.StringIO()
Expand Down Expand Up @@ -1572,24 +1573,22 @@ def handleJSONContext(self, node):
self.error(404)
self.response.out.write('<title>404 Not Found.</title><a href="/">404 Not Found (JSON-LD Context not enabled.)</a><br/><br/>')
return True
label = "jsonld:jsonldcontext.jsonld"
if (node=="docs/jsonldcontext.json.txt"):
label = "jsonldcontext.json.txt"
self.response.headers['Content-Type'] = "text/plain"
elif (node=="docs/jsonldcontext.json"):
label = "jsonldcontext.json"
self.response.headers['Content-Type'] = "application/ld+json"
else:
return False

jsonldcontext = getPageFromStore(label)
if not jsonldcontext:
jsonldcontext = GetJsonLdContext(layers=ALL_LAYERS)

PageStore.put(label,jsonldcontext)

if PAGESTOREMODE == "CLOUDSTORE":
cloudstoreStoreContent("docs/jsonldcontext.json", jsonldcontext, "html")
cloudstoreStoreContent("docs/jsonldcontext.json.txt", jsonldcontext, "html")

if (node=="docs/jsonldcontext.json.txt"):
self.response.headers['Content-Type'] = "text/plain"
self.emitCacheHeaders()
self.response.out.write( jsonldcontext )
return True
if (node=="docs/jsonldcontext.json"):
self.response.headers['Content-Type'] = "application/ld+json"
if jsonldcontext:
self.emitCacheHeaders()
self.response.out.write( jsonldcontext )
return True
Expand Down Expand Up @@ -1840,7 +1839,6 @@ def checkNodeExt(self,term):
home = term.getLayer()
ext = getHostExt()
log.info("term: '%s' home: '%s' ext: '%s'" % (term,home,ext))
log.info("Supers: %s" % term.getSupers())
if home == CORE and ext == '':
return True

Expand Down Expand Up @@ -1872,7 +1870,7 @@ def handleExactTermDataOutput(self, node=None, outputtype=None):
term = VTerm.getTerm(node)
if term:
ret = True
index = "%s:%s" % (outputtype,node)
index = "%s:%s%s" % (outputtype,node,outputtype)
data = getPageFromStore(index)

excludeAttic=True
Expand All @@ -1888,6 +1886,9 @@ def handleExactTermDataOutput(self, node=None, outputtype=None):
if outputtype == ".jsonld":
self.response.headers['Content-Type'] = "application/ld+json; charset=utf-8"
format = "json-ld"
elif outputtype == ".json":
self.response.headers['Content-Type'] = "application/json; charset=utf-8"
format = "json"
elif outputtype == ".ttl":
self.response.headers['Content-Type'] = "application/x-turtle; charset=utf-8"
format = "turtle"
Expand Down Expand Up @@ -2391,7 +2392,7 @@ def get(self, node):
HeaderStore.put(hdrIndex,retHdrs) #Cache these headers for a future 304 return

#self.response.set_cookie('GOOGAPPUID', getAppEngineVersion())
log.info("Responding: node: %s status: %s. headers: \n%s" % (node,self.response.status,self.response.headers ))
log.info("Responding:\n%s\nstatus: %s\n%s" % (node,self.response.status,self.response.headers ))


def _get(self, node, doWarm=True):
Expand Down
30 changes: 22 additions & 8 deletions sdocloudstore.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,10 +124,23 @@ def buildBucketFile(self,filename,ftype,location):

mimetype, contentType = mimetypes.guess_type(bucketFile)

if not mimetype and ftype == "html":
mimetype = "text/html"

#log.info("buildBucketFile: %s %s (%s)" % (bucketFile,mimetype,contentType))
if not mimetype:
if ftype == "html":
mimetype = "text/html; charset=utf-8"
elif ftype == "jsonld" or ftype == "json-ld" :
mimetype = "application/ld+json; charset=utf-8"
elif ftype == "json":
mimetype = "application/json; charset=utf-8"
elif ftype == "ttl":
mimetype = "application/x-turtle; charset=utf-8"
elif ftype == "rdf" or ftype == "xml":
mimetype = "application/rdf+xml; charset=utf-8"
elif ftype == "nt":
mimetype = "text/plain"
elif ftype == "txt":
mimetype = "text/plain"

#log.info("buildBucketFile: %s '%s' (%s)" % (bucketFile,mimetype,contentType))

return bucketFile, mimetype

Expand All @@ -138,6 +151,7 @@ def getPath(self,bucketFile):

# [START write]
def writeFormattedFile(self, filename, ftype=None, location=None, content="", raw=False, private=False, extrameta=None):
#log.info("writeFormattedFile(%s, ftype=%s, location=%s, contentlen=%d, raw=%s, private=%s, extrameta=%s)" % (filename, ftype, location, len(content), raw, private, extrameta))
"""Create a file."""
bucketFile, mtype = self.buildBucketFile(filename,ftype,location)
if ftype != 'html':
Expand Down Expand Up @@ -222,7 +236,7 @@ def _write_json_file(self, bucketFile, mtype="application/json", data={}, privat

# [START stat]
def statFormattedFile(self, filename, ftype="html", location=None, cache=True):
log.info("statFormattedFile(%s,%s,%s,%s)" % (filename, ftype, location, cache))
#log.info("statFormattedFile(%s,%s,%s,%s)" % (filename, ftype, location, cache))
bucketFile, mtype = self.buildBucketFile(filename,ftype,location)
return self.stat_file(bucketFile, ftype, cache)

Expand All @@ -231,7 +245,7 @@ def stat_file(self, bucketFile, ftype=None, cache=True):
return self._stat_file(bucketFile, ftype=ftype, cache=cache)

def _stat_file(self, bucketFile, ftype=None, cache=True):
log.info("_stat_file(%s,%s,%s)" % (bucketFile, ftype, cache))
#log.info("_stat_file(%s,%s,%s)" % (bucketFile, ftype, cache))
ret = None
if cache:
item = self.readCache(bucketFile,ftype)
Expand All @@ -258,7 +272,7 @@ def _stat_file(self, bucketFile, ftype=None, cache=True):

# [START read]
def readFormattedFile(self, filename, ftype="html", location=None, cache=True):
log.info("readFormattedFile(%s,%s,%s,%s)" % (filename,ftype,location,cache))
#log.info("readFormattedFile(%s,ftype=%s,location=%s,cache=%s)" % (filename,ftype,location,cache))
stat, content = self.getFormattedFile(filename, ftype, location, cache)
return content

Expand All @@ -269,7 +283,7 @@ def read_file(self, bucketFile, cache=True, stat=None):


def getFormattedFile(self, filename, ftype="html", location=None, cache=True):
log.info("getFormattedFile(%s,%s,%s,%s)" % (filename,ftype,location,cache))
#log.info("getFormattedFile(%s,%s,%s,%s)" % (filename,ftype,location,cache))

bucketFile, mtype = self.buildBucketFile(filename,ftype,location)
stat, content = self.get_file(bucketFile,reqtype=ftype, cache=cache)
Expand Down

0 comments on commit cdc0c5c

Please sign in to comment.