HaikuDepot : Generate Server Model + Parser

The server uses JSON schema to generate some data-transfer-object (DTO)
models for communication with other systems.  This same schema can be
used to generate the C++ .cpp and .h files for use in HaikuDepot.  So
far these have been generated by hand and then manually added to the
Haiku repo.  Now the schema files can be copied over and from those, the
sources are generated.

Change-Id: Ia288cb7a50843e5e2bc403a6ce55508a04218c04
Reviewed-on: https://review.haiku-os.org/c/858
Reviewed-by: Adrien Destugues <pulkomandy@gmail.com>
This commit is contained in:
Andrew Lindesay 2019-01-04 00:46:33 +01:00
parent 3288c09996
commit 81dab2139e
24 changed files with 2190 additions and 4116 deletions

View File

@ -1,7 +1,44 @@
SubDir HAIKU_TOP src apps haikudepot ;
include [ FDirName $(HAIKU_TOP) src apps haikudepot build jam
HdsSchemaGenRules ] ;
UsePrivateHeaders interface kernel net package shared storage support ;
local generatedTargetDirectory = $(TARGET_COMMON_DEBUG_LOCATE_TARGET) ;
# During the build process, some sources are generated into directories; the
# variables below name the output directory for each generated source group.
local dumpExportRepositoryBulkListerTargetDirectory =
[ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
dumpexportrepositorybulklistener ] ;
local dumpExportPkgBulkListenerTargetDirectory =
[ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
dumpexportpkgbulklistener ] ;
local dumpExportPkgModelTargetDirectory =
[ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
dumpexportpkgmodel ] ;
local dumpExportRepositoryModelTargetDirectory =
[ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
dumpexportrepositorymodel ] ;
# During the build process, some sources are generated into a directory. In
# order to maintain a timestamp on that generation process, a stamp ("dummy")
# file is used to mark the target of the generation. The leafname of this
# dummy file is defined here.
local dummyFile = "dummy.dat" ;
local dumpExportRepositoryBulkListerTargetFile =
[ FDirName $(dumpExportRepositoryBulkListerTargetDirectory) $(dummyFile) ] ;
local dumpExportPkgBulkListenerTargetFile =
[ FDirName $(dumpExportPkgBulkListenerTargetDirectory) $(dummyFile) ] ;
local dumpExportPkgModelTargetFile =
[ FDirName $(dumpExportPkgModelTargetDirectory) $(dummyFile) ] ;
local dumpExportRepositoryModelTargetFile =
[ FDirName $(dumpExportRepositoryModelTargetDirectory) $(dummyFile) ] ;
# source directories
local sourceDirs =
edits_generic
@ -10,8 +47,6 @@ local sourceDirs =
ui
ui_generic
server
server/dumpexportrepository
server/dumpexportpkg
tar
util
;
@ -22,6 +57,10 @@ for sourceDir in $(sourceDirs) {
}
SEARCH_SOURCE += [ FDirName $(HAIKU_TOP) src servers package ] ;
SEARCH_SOURCE += $(dumpExportRepositoryBulkListerTargetDirectory) ;
SEARCH_SOURCE += $(dumpExportPkgBulkListenerTargetDirectory) ;
SEARCH_SOURCE += $(dumpExportPkgModelTargetDirectory) ;
SEARCH_SOURCE += $(dumpExportRepositoryModelTargetDirectory) ;
local textDocumentSources =
# edits_generic
@ -51,7 +90,7 @@ local textDocumentSources =
UndoableEditListener.cpp
;
Application HaikuDepot :
local applicationSources =
App.cpp
BarberPole.cpp
BitmapButton.cpp
@ -86,16 +125,6 @@ Application HaikuDepot :
UserLoginWindow.cpp
WorkStatusView.cpp
# network + server - model
DumpExportPkg.cpp
DumpExportPkgCategory.cpp
DumpExportPkgJsonListener.cpp
DumpExportPkgScreenshot.cpp
DumpExportPkgVersion.cpp
DumpExportRepository.cpp
DumpExportRepositorySource.cpp
DumpExportRepositoryJsonListener.cpp
# network + server / local processes
AbstractProcess.cpp
AbstractServerProcess.cpp
@ -128,10 +157,48 @@ Application HaikuDepot :
# package_daemon
ProblemWindow.cpp
ResultWindow.cpp
;
# text view stuff
$(textDocumentSources)
local generatedPkgModelSourceFiles =
DumpExportPkg.cpp
DumpExportPkgCategory.cpp
DumpExportPkgScreenshot.cpp
DumpExportPkgVersion.cpp
;
local generatedRepositoryModelSourceFiles =
DumpExportRepository.cpp
DumpExportRepositorySource.cpp
DumpExportRepositorySourceMirror.cpp
;
local generatedPkgParserSourceFiles =
DumpExportPkgJsonListener.cpp
;
local generatedRepositoryParserSourceFiles =
DumpExportRepositoryJsonListener.cpp
;
local generatedSourceFiles =
# network + server - model
DumpExportPkg.cpp
DumpExportPkgCategory.cpp
DumpExportPkgScreenshot.cpp
DumpExportPkgVersion.cpp
DumpExportRepository.cpp
DumpExportRepositorySource.cpp
DumpExportRepositorySourceMirror.cpp
# network + server - parser
DumpExportPkgJsonListener.cpp
DumpExportRepositoryJsonListener.cpp
;
Application HaikuDepot
: $(applicationSources) $(textDocumentSources)
$(generatedPkgModelSourceFiles) $(generatedRepositoryModelSourceFiles)
$(generatedPkgParserSourceFiles) $(generatedRepositoryParserSourceFiles)
: be package bnetapi translation libcolumnlistview.a shared
[ TargetLibstdc++ ] [ TargetLibsupc++ ] localestub
: HaikuDepot.rdef
@ -161,3 +228,50 @@ Application TextDocumentTest :
: be translation shared [ TargetLibsupc++ ]
;
# The following use of rules configures the generation of .cpp and .h files from
# JSON schemas that are defined in the HaikuDepotServer system. See the
# included Jam rules and actions for the definitions.
HdsSchemaGenModel $(dumpExportPkgModelTargetFile)
: dumpexportpkg.json : jsonschema2cppmodel.py ;
HdsSchemaGenModel $(dumpExportRepositoryModelTargetFile)
: dumpexportrepository.json : jsonschema2cppmodel.py ;
HdsSchemaGenBulkParser $(dumpExportRepositoryBulkListerTargetFile)
: dumpexportrepository.json : jsonschema2cppparser.py ;
HdsSchemaGenBulkParser $(dumpExportPkgBulkListenerTargetFile)
: dumpexportpkg.json : jsonschema2cppparser.py ;
HdsSchemaGenAppSrcDependsOnGeneration
[ FGristFiles $(generatedPkgParserSourceFiles) ]
[ FGristFiles $(generatedPkgParserSourceFiles:S=.h) ]
: $(dumpExportPkgBulkListenerTargetFile) ;
HdsSchemaGenAppSrcDependsOnGeneration
[ FGristFiles $(generatedRepositoryParserSourceFiles) ]
[ FGristFiles $(generatedRepositoryParserSourceFiles:S=.h) ]
: $(dumpExportRepositoryBulkListerTargetFile) ;
HdsSchemaGenAppSrcDependsOnGeneration
[ FGristFiles $(generatedRepositoryModelSourceFiles) ]
[ FGristFiles $(generatedRepositoryModelSourceFiles:S=.h) ]
: $(dumpExportRepositoryModelTargetFile) ;
HdsSchemaGenAppSrcDependsOnGeneration
[ FGristFiles $(generatedPkgModelSourceFiles) ]
[ FGristFiles $(generatedPkgModelSourceFiles:S=.h) ]
: $(dumpExportPkgModelTargetFile) ;
# This will ensure that if any of the generated files' header files change, then
# the application should be re-built.
Depends [ FGristFiles $(applicationSources:S=.o) ]
:
[ FGristFiles $(generatedPkgParserSourceFiles:S=.h) ]
[ FGristFiles $(generatedRepositoryParserSourceFiles:S=.h) ]
[ FGristFiles $(generatedRepositoryModelSourceFiles:S=.h) ]
[ FGristFiles $(generatedPkgModelSourceFiles:S=.h) ]
;

View File

@ -0,0 +1,93 @@
# =====================================
# Copyright 2019, Andrew Lindesay
# Distributed under the terms of the MIT License.
# =====================================
# HaikuDepotServer has a number of data-transfer-objects (DTO) that are defined
# by JSON schemas. The server uses these schemas to produce the objects at
# compile time. Likewise, the schema files also generate C++ side DTO model
# objects in the form of .cpp and .h files as well. This way the
# HaikuDepotServer server and HaikuDepot desktop application are able to
# communicate more 'safely'. The schema files still need to be copied from
# the server source to the Haiku source, but the generation process will ensure
# that the data-structures are consistent.
#
# The C++ side classes are generated with python scripts that are included in
# the Haiku source. These rules and actions take care of making sure that the
# python scripts are run when necessary to generate the C++ side classes. Note
# that there are two sorts of classes generated here; the model DTO objects and
# also the supporting classes that parse the DTO objects. The parsing classes
# are intended to be used with Haiku JSON parsing systems.
# pragma mark - Generic
# Update the timestamp on the given stamp file. Used as the final step of a
# generation rule so the stamp records when generation last ran successfully.
actions HdsSchemaGenTouch
{
	touch $(1)
}
# pragma mark - Model Class Generation
# 1 : the dummy file in the class generation directory (target)
# 2 : the JSON schema file
# 3 : the Python script to use
rule HdsSchemaGenModel
{
	# Locate the schema ($(2)) and generator script ($(3)) relative to the
	# current Jamfile's directory.
	SEARCH on $(2) = [ FDirName $(SUBDIR) server schema ] ;
	SEARCH on $(3) = [ FDirName $(SUBDIR) build scripts ] ;

	# Remove the whole generation directory on 'clean'; regenerate when
	# either the schema or the script changes.
	Clean $(1:D) ;
	Depends $(1) : $(2) $(3) ;
	MkDir $(1:D) ;

	# Run the generator, then touch the stamp file so the generation has a
	# timestamp for subsequent dependency checks.
	HdsSchemaGenModel1 $(1) : $(2) $(3) $(1:D) ;
	HdsSchemaGenTouch $(1) ;
}
# Shell action for model generation.
# $(2[1]) = JSON schema file, $(2[2]) = python script, $(2[3]) = output dir.
actions HdsSchemaGenModel1
{
	python $(2[2]) -i $(2[1]) --outputdirectory $(2[3])
}
# pragma mark - Bulk Parsing Class Generation
# 1 : the dummy file in the class generation directory (target)
# 2 : the JSON schema file
# 3 : the Python script to use
# Mirrors HdsSchemaGenModel; only the action differs (the parser generator is
# invoked with --supportbulkcontainer).
rule HdsSchemaGenBulkParser
{
	# Locate the schema ($(2)) and generator script ($(3)) relative to the
	# current Jamfile's directory.
	SEARCH on $(2) = [ FDirName $(SUBDIR) server schema ] ;
	SEARCH on $(3) = [ FDirName $(SUBDIR) build scripts ] ;

	# Remove the whole generation directory on 'clean'; regenerate when
	# either the schema or the script changes.
	Clean $(1:D) ;
	Depends $(1) : $(2) $(3) ;
	MkDir $(1:D) ;

	HdsSchemaGenBulkParser1 $(1) : $(2) $(3) $(1:D) ;
	HdsSchemaGenTouch $(1) ;
}
# Shell action for bulk-parser generation.
# $(2[1]) = JSON schema file, $(2[2]) = python script, $(2[3]) = output dir.
actions HdsSchemaGenBulkParser1
{
	python $(2[2]) -i $(2[1]) --outputdirectory $(2[3]) --supportbulkcontainer
}
# pragma mark - Registering Generated Classes
# Because a number of .cpp and .h files will be generated from a single python
# script's run, it is necessary to introduce a dependency between the known
# output files and the target for a given python script run.
# 1 : generated files (.h and .cpp)
# 2 : target that will generate the generated files
rule HdsSchemaGenAppSrcDependsOnGeneration {
	# The generated .cpp/.h files materialize in the stamp target's
	# directory.
	MakeLocate $(1) : $(2:D) ;

	# Ensure the generation (stamp) runs before the generated sources are
	# needed; the generated files are removed on 'clean'.
	Depends $(1) : $(2) ;
	Clean $(1) ;
}

View File

@ -0,0 +1,95 @@
# =====================================
# Copyright 2017-2019, Andrew Lindesay
# Distributed under the terms of the MIT License.
# =====================================
# common material related to generation of schema-generated artifacts.
import datetime
# The possible JSON types (json-schema "type" values handled by the
# generators).
JSON_TYPE_STRING = "string"
JSON_TYPE_OBJECT = "object"
JSON_TYPE_ARRAY = "array"
JSON_TYPE_BOOLEAN = "boolean"
JSON_TYPE_INTEGER = "integer"
JSON_TYPE_NUMBER = "number"

# The possible C++ types emitted into generated code. CPP_TYPE_ARRAY names
# Haiku's "List" template class; json 'integer' maps to int64 and 'number'
# to double.
CPP_TYPE_STRING = "BString"
CPP_TYPE_ARRAY = "List"
CPP_TYPE_BOOLEAN = "bool"
CPP_TYPE_INTEGER = "int64"
CPP_TYPE_NUMBER = "double"
def uniondicts(d1, d2):
    """Return a new dict holding the entries of d1 overlaid with those of d2.

    Entries from d2 win when a key appears in both. Neither input dict is
    modified.
    """
    merged = dict(d1)
    for key, value in d2.items():
        merged[key] = value
    return merged
def javatypetocppname(javaname):
    """Derive the C++ class name from a fully-qualified Java type name.

    The C++ name is the final dot-separated component. rindex() deliberately
    raises ValueError when no '.' is present.
    """
    lastdot = javaname.rindex('.')
    return javaname[lastdot + 1:]


def propnametocppname(propname):
    """Upper-case only the first character of the property name."""
    if not propname:
        return propname
    return propname[0].upper() + propname[1:]


def propnametocppmembername(propname):
    """Derive the instance-variable name; Haiku convention is an 'f' prefix."""
    return 'f' + propnametocppname(propname)
def propmetatojsoneventtypename(propmetadata):
    # NOTE(review): this function appears to be truncated -- it reads the
    # property's json-schema type but performs no mapping and implicitly
    # returns None. A mapping to a BJson event-type name would be expected
    # here; confirm against the parser-generation script before relying on
    # this function.
    type = propmetadata['type']
def propmetadatatocpptypename(propmetadata):
    """Map a json-schema property description onto the C++ type it becomes.

    Scalars and strings map via fixed constants; objects and arrays derive
    their C++ class name from the schema's "javaType" field. Raises when a
    required "javaType" is absent or the json type is unknown.
    """
    jsontype = propmetadata['type']

    directmapping = {
        JSON_TYPE_STRING: CPP_TYPE_STRING,
        JSON_TYPE_BOOLEAN: CPP_TYPE_BOOLEAN,
        JSON_TYPE_INTEGER: CPP_TYPE_INTEGER,
        JSON_TYPE_NUMBER: CPP_TYPE_NUMBER,
    }

    if jsontype in directmapping:
        return directmapping[jsontype]

    if jsontype == JSON_TYPE_OBJECT:
        javatype = propmetadata['javaType']
        if not javatype or 0 == len(javatype):
            raise Exception('missing "javaType" field')
        return javatypetocppname(javatype)

    if jsontype == JSON_TYPE_ARRAY:
        itemsjavatype = propmetadata['items']['javaType']
        if not itemsjavatype or 0 == len(itemsjavatype):
            raise Exception('missing "javaType" field')
        return "%s <%s*, true>" % (
            CPP_TYPE_ARRAY, javatypetocppname(itemsjavatype))

    raise Exception('unknown json-schema type [' + jsontype + ']')
def propmetadatatypeisscalar(propmetadata):
    """True when the property maps onto a C++ value type (bool/int64/double)."""
    return propmetadata['type'] in (
        JSON_TYPE_BOOLEAN, JSON_TYPE_INTEGER, JSON_TYPE_NUMBER)
def writetopcomment(f, inputfilename, variant):
    """Write the standard generated-file banner comment to stream f.

    Records the generation variant (e.g. 'Model'), the source schema's
    leafname and the moment of generation.
    """
    generatedat = datetime.datetime.now().isoformat()
    f.write(
        '/*\n'
        ' * Generated %s Object\n'
        ' * source json-schema : %s\n'
        ' * generated at : %s\n'
        ' */\n' % (variant, inputfilename, generatedat))

View File

@ -0,0 +1,396 @@
#!/usr/bin/python
# =====================================
# Copyright 2017-2019, Andrew Lindesay
# Distributed under the terms of the MIT License.
# =====================================
# This simple tool will read a JSON schema and will then generate
# some model objects that can be used to hold the data-structure
# in the C++ environment.
import json
import argparse
import os
import hdsjsonschemacommon as jscom
import string
def hasanylistproperties(schema):
    """True when at least one property of the schema is a json 'array'."""
    return any(
        propmetadata['type'] == 'array'
        for propmetadata in schema['properties'].values())
def writelistaccessors(outputfile, cppclassname, cppname, cppmembername, cppcontainertype):
    """Write the .cpp implementations of the accessor family for a list
    property: AddTo / Set / Count / ItemAt / IsNull.

    The backing member is a Haiku List<T*, true> pointer that is lazily
    created by AddTo; a NULL member represents the JSON 'null' state.
    """
    # note: 'dict' shadows the built-in name; left unchanged in this
    # documentation-only pass.
    dict = {
        'cppclassname' : cppclassname,
        'cppname': cppname,
        'cppmembername': cppmembername,
        'cppcontainertype': cppcontainertype
    }

    outputfile.write(
        string.Template("""
void
${cppclassname}::AddTo${cppname}(${cppcontainertype}* value)
{
if (${cppmembername} == NULL)
${cppmembername} = new List<${cppcontainertype}*, true>();
${cppmembername}->Add(value);
}
void
${cppclassname}::Set${cppname}(List<${cppcontainertype}*, true>* value)
{
${cppmembername} = value;
}
int32
${cppclassname}::Count${cppname}()
{
if (${cppmembername} == NULL)
return 0;
return ${cppmembername}->CountItems();
}
${cppcontainertype}*
${cppclassname}::${cppname}ItemAt(int32 index)
{
return ${cppmembername}->ItemAt(index);
}
bool
${cppclassname}::${cppname}IsNull()
{
return ${cppmembername} == NULL;
}
""").substitute(dict))


def writelistaccessorsheader(outputfile, cppname, cppcontainertype):
    """Write the .h declarations matching writelistaccessors."""
    # note: 'dict' shadows the built-in name; left unchanged in this
    # documentation-only pass.
    dict = {
        'cppname': cppname,
        'cppcontainertype': cppcontainertype
    }

    outputfile.write(
        string.Template(""" void AddTo${cppname}(${cppcontainertype}* value);
void Set${cppname}(List<${cppcontainertype}*, true>* value);
int32 Count${cppname}();
${cppcontainertype}* ${cppname}ItemAt(int32 index);
bool ${cppname}IsNull();
""").substitute(dict))
def writetakeownershipaccessors(outputfile, cppclassname, cppname, cppmembername, cpptype):
    """Write the .cpp accessors for a pointer-valued (object or string)
    property.

    The setter stores the supplied pointer (taking ownership); SetXNull
    deletes any held value. A NULL member represents the JSON 'null' state.
    """
    # note: 'dict' shadows the built-in name; left unchanged in this
    # documentation-only pass.
    dict = {
        'cppclassname': cppclassname,
        'cppname': cppname,
        'cppmembername': cppmembername,
        'cpptype': cpptype
    }

    outputfile.write(
        string.Template("""
${cpptype}*
${cppclassname}::${cppname}()
{
return ${cppmembername};
}
void
${cppclassname}::Set${cppname}(${cpptype}* value)
{
${cppmembername} = value;
}
void
${cppclassname}::Set${cppname}Null()
{
if (!${cppname}IsNull()) {
delete ${cppmembername};
${cppmembername} = NULL;
}
}
bool
${cppclassname}::${cppname}IsNull()
{
return ${cppmembername} == NULL;
}
""").substitute(dict))


def writetakeownershipaccessorsheader(outputfile, cppname, cpptype):
    """Write the .h declarations matching writetakeownershipaccessors."""
    outputfile.write(' %s* %s();\n' % (cpptype, cppname))
    outputfile.write(' void Set%s(%s* value);\n' % (cppname, cpptype))
    outputfile.write(' void Set%sNull();\n' % cppname)
    outputfile.write(' bool %sIsNull();\n' % cppname)
def writescalaraccessors(outputfile, cppclassname, cppname, cppmembername, cpptype):
    """Write the .cpp accessors for a scalar (bool / int64 / double) property.

    The member is a pointer to a one-element array so that NULL can encode
    the JSON 'null' state; the getter dereferences it and the setter
    allocates the cell on first use.

    NOTE(review): the cell is allocated with 'new ${cpptype}[1]' but released
    with plain 'delete' (here and in the generated destructor). For POD
    scalar types this is benign in practice, but 'delete[]' would be the
    strictly correct C++ -- confirm before changing the generator's output.
    """
    # note: 'dict' shadows the built-in name; left unchanged in this
    # documentation-only pass.
    dict = {
        'cppclassname': cppclassname,
        'cppname': cppname,
        'cppmembername': cppmembername,
        'cpptype': cpptype
    }

    outputfile.write(
        string.Template("""
${cpptype}
${cppclassname}::${cppname}()
{
return *${cppmembername};
}
void
${cppclassname}::Set${cppname}(${cpptype} value)
{
if (${cppname}IsNull())
${cppmembername} = new ${cpptype}[1];
${cppmembername}[0] = value;
}
void
${cppclassname}::Set${cppname}Null()
{
if (!${cppname}IsNull()) {
delete ${cppmembername};
${cppmembername} = NULL;
}
}
bool
${cppclassname}::${cppname}IsNull()
{
return ${cppmembername} == NULL;
}
""").substitute(dict))


def writescalaraccessorsheader(outputfile, cppname, cpptype):
    """Write the .h declarations matching writescalaraccessors."""
    outputfile.write(
        string.Template("""
${cpptype} ${cppname}();
void Set${cppname}(${cpptype} value);
void Set${cppname}Null();
bool ${cppname}IsNull();
""").substitute({'cppname': cppname, 'cpptype': cpptype}))
def writeaccessors(outputfile, cppclassname, propname, propmetadata):
    """Write the .cpp accessor implementations for one property.

    Dispatches on the property's json-schema type: arrays get the list
    accessor family, scalars get by-value accessors, and everything else
    (objects, strings) gets pointer / take-ownership accessors.
    """
    cppname = jscom.propnametocppname(propname)
    cppmembername = jscom.propnametocppmembername(propname)

    if propmetadata['type'] == 'array':
        itemcpptype = jscom.javatypetocppname(
            propmetadata['items']['javaType'])
        writelistaccessors(outputfile, cppclassname, cppname, cppmembername,
            itemcpptype)
    elif jscom.propmetadatatypeisscalar(propmetadata):
        writescalaraccessors(outputfile, cppclassname, cppname, cppmembername,
            jscom.propmetadatatocpptypename(propmetadata))
    else:
        writetakeownershipaccessors(outputfile, cppclassname, cppname,
            cppmembername, jscom.propmetadatatocpptypename(propmetadata))
def writeaccessorsheader(outputfile, propname, propmetadata):
    """Write the .h accessor declarations for one property; dispatch mirrors
    writeaccessors.
    """
    cppname = jscom.propnametocppname(propname)

    if propmetadata['type'] == 'array':
        writelistaccessorsheader(outputfile, cppname,
            jscom.javatypetocppname(propmetadata['items']['javaType']))
    elif jscom.propmetadatatypeisscalar(propmetadata):
        writescalaraccessorsheader(outputfile, cppname,
            jscom.propmetadatatocpptypename(propmetadata))
    else:
        writetakeownershipaccessorsheader(outputfile, cppname,
            jscom.propmetadatatocpptypename(propmetadata))
def writedestructorlogicforlist(outputfile, propname, propmetadata):
    """Write the destructor fragment that deletes a list member's items.

    Relies on the enclosing destructor having declared the loop variable
    'i' (see writedestructor).
    """
    # note: 'dict' shadows the built-in name; left unchanged in this
    # documentation-only pass.
    dict = {
        'cppmembername': jscom.propnametocppmembername(propname),
        'cpptype': jscom.javatypetocppname(propmetadata['items']['javaType'])
    }

    outputfile.write(
        string.Template(""" int32 count = ${cppmembername}->CountItems();
for (i = 0; i < count; i++)
delete ${cppmembername}->ItemAt(i);
""").substitute(dict))


def writedestructor(outputfile, cppname, schema):
    """Write the class destructor: every non-NULL member is deleted, and list
    members additionally have their contained items deleted first.
    """
    outputfile.write('\n\n%s::~%s()\n{\n' % (cppname, cppname))

    # The loop counter is only needed when some list property exists.
    if hasanylistproperties(schema):
        outputfile.write(' int32 i;\n\n')

    for propname, propmetadata in schema['properties'].items():
        propmembername = jscom.propnametocppmembername(propname)
        outputfile.write(' if (%s != NULL) {\n' % propmembername)

        if propmetadata['type'] == 'array':
            writedestructorlogicforlist(outputfile, propname, propmetadata)

        # NOTE(review): plain 'delete' is also emitted for scalar members that
        # the setters allocate with 'new T[1]'; see writescalaraccessors.
        outputfile.write((
            ' delete %s;\n'
        ) % propmembername)

        outputfile.write(' }\n\n')

    outputfile.write('}\n')
def writeconstructor(outputfile, cppname, schema):
    """Write the class constructor; every member starts out NULL (the JSON
    'null' state).
    """
    outputfile.write('\n\n%s::%s()\n{\n' % (cppname, cppname))

    for propname in schema['properties'].keys():
        outputfile.write(' %s = NULL;\n'
            % jscom.propnametocppmembername(propname))

    outputfile.write('}\n')
def writeheaderincludes(outputfile, properties):
    """Write #include lines for every property whose type is defined by
    another generated class (objects and array items with a "javaType").
    """
    for propname, propmetadata in properties.items():
        jsontype = propmetadata['type']

        if jsontype == 'object':
            javatype = propmetadata['javaType']
        elif jsontype == 'array':
            javatype = propmetadata['items']['javaType']
        else:
            javatype = None

        if javatype is not None:
            outputfile.write(
                '#include "%s.h"\n' % jscom.javatypetocppname(javatype))
def schematocppmodels(inputfile, schema, outputdirectory):
    """Generate the C++ .h / .cpp model-class pair for an object schema.

    The class name is derived from the schema's "javaType" field. Recurses
    into object- and array-typed properties so that subordinate types are
    generated into the same directory. 'inputfile' is used only for the
    banner comment in the generated files.
    """
    if schema['type'] != 'object':
        raise Exception('expecting object')

    javatype = schema['javaType']

    if not javatype or 0 == len(javatype):
        raise Exception('missing "javaType" field')

    cppclassname = jscom.javatypetocppname(javatype)
    cpphfilename = os.path.join(outputdirectory, cppclassname + '.h')
    cppifilename = os.path.join(outputdirectory, cppclassname + '.cpp')

    # Header file: banner, include guard, base includes, includes for any
    # property types, then the class declaration.
    with open(cpphfilename, 'w') as cpphfile:
        jscom.writetopcomment(cpphfile, os.path.split(inputfile)[1], 'Model')
        guarddefname = 'GEN_JSON_SCHEMA_MODEL__%s_H' % (cppclassname.upper())

        cpphfile.write(string.Template("""
#ifndef ${guarddefname}
#define ${guarddefname}
#include "List.h"
#include "String.h"
""").substitute({'guarddefname': guarddefname}))

        writeheaderincludes(cpphfile, schema['properties'])

        cpphfile.write(string.Template("""
class ${cppclassname} {
public:
${cppclassname}();
virtual ~${cppclassname}();
""").substitute({'cppclassname': cppclassname}))

        for propname, propmetadata in schema['properties'].items():
            writeaccessorsheader(cpphfile, propname, propmetadata)
            cpphfile.write('\n')

        # Now add the instance variables for the object as well.
        cpphfile.write('private:\n')

        for propname, propmetadata in schema['properties'].items():
            cpphfile.write(' %s* %s;\n' % (
                jscom.propmetadatatocpptypename(propmetadata),
                jscom.propnametocppmembername(propname)))

        cpphfile.write((
            '};\n\n'
            '#endif // %s'
        ) % guarddefname)

    # Implementation file: banner, constructor, destructor and all accessors.
    with open(cppifilename, 'w') as cppifile:
        jscom.writetopcomment(cppifile, os.path.split(inputfile)[1], 'Model')
        cppifile.write('#include "%s.h"\n' % cppclassname)
        writeconstructor(cppifile, cppclassname, schema)
        writedestructor(cppifile, cppclassname, schema)

        for propname, propmetadata in schema['properties'].items():
            writeaccessors(cppifile, cppclassname, propname, propmetadata)
            cppifile.write('\n')

    # Now write out any subordinate structures.
    for propname, propmetadata in schema['properties'].items():
        jsontype = propmetadata['type']

        if jsontype == 'array':
            schematocppmodels(inputfile, propmetadata['items'],
                outputdirectory)

        if jsontype == 'object':
            schematocppmodels(inputfile, propmetadata, outputdirectory)
def main():
    """Command-line entry point: parse arguments, load the schema and run
    the model generation. Output defaults to the current directory.
    """
    parser = argparse.ArgumentParser(
        description='Convert JSON schema to Haiku C++ Models')
    parser.add_argument('-i', '--inputfile', required=True,
        help='The input filename containing the JSON schema')
    parser.add_argument('--outputdirectory',
        help='The output directory where the C++ files should be written')
    args = parser.parse_args()

    outputdirectory = args.outputdirectory
    if not outputdirectory:
        outputdirectory = '.'

    with open(args.inputfile) as inputfile:
        schema = json.load(inputfile)
        schematocppmodels(args.inputfile, schema, outputdirectory)


if __name__ == "__main__":
    main()

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,5 @@
/*
* Copyright 2017-2018, Andrew Lindesay <apl@lindesay.co.nz>.
* Copyright 2017-2019, Andrew Lindesay <apl@lindesay.co.nz>.
* All rights reserved. Distributed under the terms of the MIT License.
*/
@ -162,9 +162,6 @@ PackageFillingPkgListener::ConsumePackage(const PackageInfoRef& package,
if (!pkg->ProminenceOrderingIsNull())
package->SetProminence(pkg->ProminenceOrdering());
if (!pkg->PkgChangelogContentIsNull())
package->SetChangelog(*(pkg->PkgChangelogContent()));
int32 countPkgScreenshots = pkg->CountPkgScreenshots();
for (i = 0; i < countPkgScreenshots; i++) {

View File

@ -1,345 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.116794
*/
#include "DumpExportPkg.h"
// Generated model object (see jsonschema2cppmodel.py). Every property is
// heap-allocated and starts out NULL; NULL encodes the JSON 'null' /
// absent state.
DumpExportPkg::DumpExportPkg()
{
	fPkgChangelogContent = NULL;
	fName = NULL;
	fPkgVersions = NULL;
	fDerivedRating = NULL;
	fPkgScreenshots = NULL;
	fProminenceOrdering = NULL;
	fPkgCategories = NULL;
	fModifyTimestamp = NULL;
}
// Owns all of its members: deletes each non-NULL member, and for list
// members deletes the contained items before the list itself.
DumpExportPkg::~DumpExportPkg()
{
	int32 i;

	if (fPkgChangelogContent != NULL) {
		delete fPkgChangelogContent;
	}

	if (fName != NULL) {
		delete fName;
	}

	if (fPkgVersions != NULL) {
		int32 count = fPkgVersions->CountItems();
		for (i = 0; i < count; i++)
			delete fPkgVersions->ItemAt(i);
		delete fPkgVersions;
	}

	if (fDerivedRating != NULL) {
		// NOTE(review): allocated as 'new double[1]' by SetDerivedRating but
		// released with plain 'delete' -- benign for PODs in practice, but
		// 'delete[]' would be strictly correct (same applies to the other
		// scalar members below; fix belongs in the generator).
		delete fDerivedRating;
	}

	if (fPkgScreenshots != NULL) {
		int32 count = fPkgScreenshots->CountItems();
		for (i = 0; i < count; i++)
			delete fPkgScreenshots->ItemAt(i);
		delete fPkgScreenshots;
	}

	if (fProminenceOrdering != NULL) {
		delete fProminenceOrdering;
	}

	if (fPkgCategories != NULL) {
		int32 count = fPkgCategories->CountItems();
		for (i = 0; i < count; i++)
			delete fPkgCategories->ItemAt(i);
		delete fPkgCategories;
	}

	if (fModifyTimestamp != NULL) {
		delete fModifyTimestamp;
	}
}
BString*
DumpExportPkg::PkgChangelogContent()
{
return fPkgChangelogContent;
}
void
DumpExportPkg::SetPkgChangelogContent(BString* value)
{
fPkgChangelogContent = value;
}
void
DumpExportPkg::SetPkgChangelogContentNull()
{
if (!PkgChangelogContentIsNull()) {
delete fPkgChangelogContent;
fPkgChangelogContent = NULL;
}
}
bool
DumpExportPkg::PkgChangelogContentIsNull()
{
return fPkgChangelogContent == NULL;
}
BString*
DumpExportPkg::Name()
{
return fName;
}
void
DumpExportPkg::SetName(BString* value)
{
fName = value;
}
void
DumpExportPkg::SetNameNull()
{
if (!NameIsNull()) {
delete fName;
fName = NULL;
}
}
bool
DumpExportPkg::NameIsNull()
{
return fName == NULL;
}
void
DumpExportPkg::AddToPkgVersions(DumpExportPkgVersion* value)
{
if (fPkgVersions == NULL)
fPkgVersions = new List<DumpExportPkgVersion*, true>();
fPkgVersions->Add(value);
}
void
DumpExportPkg::SetPkgVersions(List<DumpExportPkgVersion*, true>* value)
{
fPkgVersions = value;
}
int32
DumpExportPkg::CountPkgVersions()
{
if (fPkgVersions == NULL)
return 0;
return fPkgVersions->CountItems();
}
DumpExportPkgVersion*
DumpExportPkg::PkgVersionsItemAt(int32 index)
{
return fPkgVersions->ItemAt(index);
}
bool
DumpExportPkg::PkgVersionsIsNull()
{
return fPkgVersions == NULL;
}
double
DumpExportPkg::DerivedRating()
{
return *fDerivedRating;
}
void
DumpExportPkg::SetDerivedRating(double value)
{
if (DerivedRatingIsNull())
fDerivedRating = new double[1];
fDerivedRating[0] = value;
}
void
DumpExportPkg::SetDerivedRatingNull()
{
if (!DerivedRatingIsNull()) {
delete fDerivedRating;
fDerivedRating = NULL;
}
}
bool
DumpExportPkg::DerivedRatingIsNull()
{
return fDerivedRating == NULL;
}
void
DumpExportPkg::AddToPkgScreenshots(DumpExportPkgScreenshot* value)
{
if (fPkgScreenshots == NULL)
fPkgScreenshots = new List<DumpExportPkgScreenshot*, true>();
fPkgScreenshots->Add(value);
}
void
DumpExportPkg::SetPkgScreenshots(List<DumpExportPkgScreenshot*, true>* value)
{
fPkgScreenshots = value;
}
int32
DumpExportPkg::CountPkgScreenshots()
{
if (fPkgScreenshots == NULL)
return 0;
return fPkgScreenshots->CountItems();
}
DumpExportPkgScreenshot*
DumpExportPkg::PkgScreenshotsItemAt(int32 index)
{
return fPkgScreenshots->ItemAt(index);
}
bool
DumpExportPkg::PkgScreenshotsIsNull()
{
return fPkgScreenshots == NULL;
}
int64
DumpExportPkg::ProminenceOrdering()
{
return *fProminenceOrdering;
}
void
DumpExportPkg::SetProminenceOrdering(int64 value)
{
if (ProminenceOrderingIsNull())
fProminenceOrdering = new int64[1];
fProminenceOrdering[0] = value;
}
void
DumpExportPkg::SetProminenceOrderingNull()
{
if (!ProminenceOrderingIsNull()) {
delete fProminenceOrdering;
fProminenceOrdering = NULL;
}
}
bool
DumpExportPkg::ProminenceOrderingIsNull()
{
return fProminenceOrdering == NULL;
}
void
DumpExportPkg::AddToPkgCategories(DumpExportPkgCategory* value)
{
if (fPkgCategories == NULL)
fPkgCategories = new List<DumpExportPkgCategory*, true>();
fPkgCategories->Add(value);
}
void
DumpExportPkg::SetPkgCategories(List<DumpExportPkgCategory*, true>* value)
{
fPkgCategories = value;
}
int32
DumpExportPkg::CountPkgCategories()
{
if (fPkgCategories == NULL)
return 0;
return fPkgCategories->CountItems();
}
DumpExportPkgCategory*
DumpExportPkg::PkgCategoriesItemAt(int32 index)
{
return fPkgCategories->ItemAt(index);
}
bool
DumpExportPkg::PkgCategoriesIsNull()
{
return fPkgCategories == NULL;
}
int64
DumpExportPkg::ModifyTimestamp()
{
return *fModifyTimestamp;
}
void
DumpExportPkg::SetModifyTimestamp(int64 value)
{
if (ModifyTimestampIsNull())
fModifyTimestamp = new int64[1];
fModifyTimestamp[0] = value;
}
void
DumpExportPkg::SetModifyTimestampNull()
{
if (!ModifyTimestampIsNull()) {
delete fModifyTimestamp;
fModifyTimestamp = NULL;
}
}
bool
DumpExportPkg::ModifyTimestampIsNull()
{
return fModifyTimestamp == NULL;
}

View File

@ -1,79 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.115160
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKG_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKG_H
#include "List.h"
#include "String.h"
#include "DumpExportPkgVersion.h"
#include "DumpExportPkgScreenshot.h"
#include "DumpExportPkgCategory.h"
class DumpExportPkg {
public:
DumpExportPkg();
virtual ~DumpExportPkg();
BString* PkgChangelogContent();
void SetPkgChangelogContent(BString* value);
void SetPkgChangelogContentNull();
bool PkgChangelogContentIsNull();
BString* Name();
void SetName(BString* value);
void SetNameNull();
bool NameIsNull();
void AddToPkgVersions(DumpExportPkgVersion* value);
void SetPkgVersions(List<DumpExportPkgVersion*, true>* value);
int32 CountPkgVersions();
DumpExportPkgVersion* PkgVersionsItemAt(int32 index);
bool PkgVersionsIsNull();
double DerivedRating();
void SetDerivedRating(double value);
void SetDerivedRatingNull();
bool DerivedRatingIsNull();
void AddToPkgScreenshots(DumpExportPkgScreenshot* value);
void SetPkgScreenshots(List<DumpExportPkgScreenshot*, true>* value);
int32 CountPkgScreenshots();
DumpExportPkgScreenshot* PkgScreenshotsItemAt(int32 index);
bool PkgScreenshotsIsNull();
int64 ProminenceOrdering();
void SetProminenceOrdering(int64 value);
void SetProminenceOrderingNull();
bool ProminenceOrderingIsNull();
void AddToPkgCategories(DumpExportPkgCategory* value);
void SetPkgCategories(List<DumpExportPkgCategory*, true>* value);
int32 CountPkgCategories();
DumpExportPkgCategory* PkgCategoriesItemAt(int32 index);
bool PkgCategoriesIsNull();
int64 ModifyTimestamp();
void SetModifyTimestamp(int64 value);
void SetModifyTimestampNull();
bool ModifyTimestampIsNull();
private:
BString* fPkgChangelogContent;
BString* fName;
List <DumpExportPkgVersion*, true>* fPkgVersions;
double* fDerivedRating;
List <DumpExportPkgScreenshot*, true>* fPkgScreenshots;
int64* fProminenceOrdering;
List <DumpExportPkgCategory*, true>* fPkgCategories;
int64* fModifyTimestamp;
};
#endif // GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKG_H

View File

@ -1,52 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.118616
*/
#include "DumpExportPkgCategory.h"
// Generated model object for a package category. The single 'code' property
// follows the generated NULL-able accessor pattern: NULL encodes JSON 'null'
// and the object owns the held value.
DumpExportPkgCategory::DumpExportPkgCategory()
{
	fCode = NULL;
}


DumpExportPkgCategory::~DumpExportPkgCategory()
{
	if (fCode != NULL) {
		delete fCode;
	}
}


// Returns the held BString, or NULL when the value is 'null'; ownership is
// retained by this object.
BString*
DumpExportPkgCategory::Code()
{
	return fCode;
}


// Takes ownership of 'value'.
// NOTE(review): any previously-held value is not deleted here (leaks if the
// setter is called twice) -- generated code; a fix belongs in the generator.
void
DumpExportPkgCategory::SetCode(BString* value)
{
	fCode = value;
}


void
DumpExportPkgCategory::SetCodeNull()
{
	if (!CodeIsNull()) {
		delete fCode;
		fCode = NULL;
	}
}


bool
DumpExportPkgCategory::CodeIsNull()
{
	return fCode == NULL;
}

View File

@ -1,28 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.118521
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGCATEGORY_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGCATEGORY_H
#include "List.h"
#include "String.h"
// Generated DTO model for a package category transferred from
// HaikuDepotServer as JSON. NULL-valued members encode JSON 'null'.
class DumpExportPkgCategory {
public:
	DumpExportPkgCategory();
	virtual ~DumpExportPkgCategory();

	// 'code' property accessors; the setter takes ownership of the pointer.
	BString* Code();
	void SetCode(BString* value);
	void SetCodeNull();
	bool CodeIsNull();

private:
	BString* fCode;
};

View File

@ -1,89 +0,0 @@
/*
* Generated Listener Object
* source json-schema : dumpexport.json
* generated at : 2017-12-17T20:45:25.514143
*/
#ifndef GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTPKGJSONLISTENER_H
#define GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTPKGJSONLISTENER_H
#include <JsonEventListener.h>
#include "DumpExportPkg.h"
class AbstractStackedDumpExportPkgJsonListener;
/*! Common base for the top-level listeners that receive JSON parse events for
    the DumpExportPkg schema.  It records the first parse error and delegates
    incoming events to the currently active "stacked" listener.
*/
class AbstractMainDumpExportPkgJsonListener : public BJsonEventListener {
friend class AbstractStackedDumpExportPkgJsonListener;
public:
	AbstractMainDumpExportPkgJsonListener();
	virtual ~AbstractMainDumpExportPkgJsonListener();

	// Records the supplied error; retrieve it later with ErrorStatus().
	void HandleError(status_t status, int32 line, const char* message);
	void Complete();

	// B_OK until a parse error has been reported via HandleError().
	status_t ErrorStatus();

protected:
	// Makes `listener` the target for subsequent JSON events; the stacked
	// listeners call this to push/pop themselves as the parse descends
	// into and returns from nested JSON structures.
	void SetStackedListener(
		AbstractStackedDumpExportPkgJsonListener* listener);

	status_t fErrorStatus;
	AbstractStackedDumpExportPkgJsonListener* fStackedListener;
};


/*! Use this listener when you want to parse some JSON data that contains
    just a single instance of DumpExportPkg.
*/
class SingleDumpExportPkgJsonListener
	: public AbstractMainDumpExportPkgJsonListener {
friend class AbstractStackedDumpExportPkgJsonListener;
public:
	SingleDumpExportPkgJsonListener();
	virtual ~SingleDumpExportPkgJsonListener();

	bool Handle(const BJsonEvent& event);

	// The parsed object; NOTE(review): ownership semantics are not visible
	// here - confirm against the generated .cpp before relying on them.
	DumpExportPkg* Target();

private:
	DumpExportPkg* fTarget;
};


/*! Concrete sub-classes of this class are able to respond to each
    DumpExportPkg* instance as it is parsed from the bulk container.  When the
    stream is finished, the Complete() method is invoked.

    Note that the item object will be deleted after the Handle method is
    invoked.  The Handle method need not take responsibility for deleting the
    item itself.
*/
class DumpExportPkgListener {
public:
	virtual bool Handle(DumpExportPkg* item) = 0;
	virtual void Complete() = 0;
};


/*! Use this listener, together with an instance of a concrete subclass of
    DumpExportPkgListener, in order to parse the JSON data in a specific
    "bulk container" format.  Each time that an instance of DumpExportPkg is
    parsed, the item listener will be invoked.
*/
class BulkContainerDumpExportPkgJsonListener
	: public AbstractMainDumpExportPkgJsonListener {
friend class AbstractStackedDumpExportPkgJsonListener;
public:
	BulkContainerDumpExportPkgJsonListener(
		DumpExportPkgListener* itemListener);
	~BulkContainerDumpExportPkgJsonListener();

	bool Handle(const BJsonEvent& event);

private:
	DumpExportPkgListener* fItemListener;
};
#endif // GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTPKGJSONLISTENER_H

View File

@ -1,204 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.118221
*/
#include "DumpExportPkgScreenshot.h"
// All properties start out as NULL, representing a JSON null / absent value.
DumpExportPkgScreenshot::DumpExportPkgScreenshot()
	:
	fOrdering(NULL),
	fWidth(NULL),
	fLength(NULL),
	fCode(NULL),
	fHeight(NULL)
{
}
DumpExportPkgScreenshot::~DumpExportPkgScreenshot()
{
	// `delete` on a NULL pointer is a guaranteed no-op, so the explicit
	// NULL checks of the generated code are unnecessary.
	delete fOrdering;
	delete fWidth;
	delete fLength;
	delete fCode;
	delete fHeight;
}
/*! Returns the ordering value.  Only call when OrderingIsNull() is false;
    otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgScreenshot::Ordering()
{
	return *fOrdering;
}


void
DumpExportPkgScreenshot::SetOrdering(int64 value)
{
	// Fix: allocate with scalar `new` instead of `new int64[1]`.  The
	// destructor and SetOrderingNull() release this pointer with scalar
	// `delete`; pairing `new[]` with `delete` is undefined behaviour.
	if (OrderingIsNull())
		fOrdering = new int64;
	*fOrdering = value;
}


void
DumpExportPkgScreenshot::SetOrderingNull()
{
	// Reverts the property to the JSON-null representation.
	if (!OrderingIsNull()) {
		delete fOrdering;
		fOrdering = NULL;
	}
}


bool
DumpExportPkgScreenshot::OrderingIsNull()
{
	return fOrdering == NULL;
}
/*! Returns the width value.  Only call when WidthIsNull() is false;
    otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgScreenshot::Width()
{
	return *fWidth;
}


void
DumpExportPkgScreenshot::SetWidth(int64 value)
{
	// Fix: scalar `new` rather than `new int64[1]` so that the scalar
	// `delete` in the destructor and SetWidthNull() matches the
	// allocation (`new[]` requires `delete[]`).
	if (WidthIsNull())
		fWidth = new int64;
	*fWidth = value;
}


void
DumpExportPkgScreenshot::SetWidthNull()
{
	if (!WidthIsNull()) {
		delete fWidth;
		fWidth = NULL;
	}
}


bool
DumpExportPkgScreenshot::WidthIsNull()
{
	return fWidth == NULL;
}
/*! Returns the payload length value.  Only call when LengthIsNull() is
    false; otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgScreenshot::Length()
{
	return *fLength;
}


void
DumpExportPkgScreenshot::SetLength(int64 value)
{
	// Fix: scalar `new` rather than `new int64[1]` so that the scalar
	// `delete` in the destructor and SetLengthNull() matches the
	// allocation (`new[]` requires `delete[]`).
	if (LengthIsNull())
		fLength = new int64;
	*fLength = value;
}


void
DumpExportPkgScreenshot::SetLengthNull()
{
	if (!LengthIsNull()) {
		delete fLength;
		fLength = NULL;
	}
}


bool
DumpExportPkgScreenshot::LengthIsNull()
{
	return fLength == NULL;
}
/*! The screenshot code; NULL while the property carries a JSON null.
    The object takes ownership of any string handed to SetCode().
*/
BString*
DumpExportPkgScreenshot::Code()
{
	return fCode;
}


void
DumpExportPkgScreenshot::SetCode(BString* value)
{
	fCode = value;
}


void
DumpExportPkgScreenshot::SetCodeNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fCode;
	fCode = NULL;
}


bool
DumpExportPkgScreenshot::CodeIsNull()
{
	return fCode == NULL;
}
/*! Returns the height value.  Only call when HeightIsNull() is false;
    otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgScreenshot::Height()
{
	return *fHeight;
}


void
DumpExportPkgScreenshot::SetHeight(int64 value)
{
	// Fix: scalar `new` rather than `new int64[1]` so that the scalar
	// `delete` in the destructor and SetHeightNull() matches the
	// allocation (`new[]` requires `delete[]`).
	if (HeightIsNull())
		fHeight = new int64;
	*fHeight = value;
}


void
DumpExportPkgScreenshot::SetHeightNull()
{
	if (!HeightIsNull()) {
		delete fHeight;
		fHeight = NULL;
	}
}


bool
DumpExportPkgScreenshot::HeightIsNull()
{
	return fHeight == NULL;
}

View File

@ -1,56 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.118049
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGSCREENSHOT_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGSCREENSHOT_H
#include "List.h"
#include "String.h"
/*! Generated model object describing a package screenshot in the server's
    JSON "dump export" format.  Every property is stored behind a pointer so
    that a JSON null can be represented by NULL; the scalar getters must not
    be called while the corresponding ...IsNull() returns true.
*/
class DumpExportPkgScreenshot {
public:
	DumpExportPkgScreenshot();
	virtual ~DumpExportPkgScreenshot();

	int64 Ordering();
	void SetOrdering(int64 value);
	void SetOrderingNull();
	bool OrderingIsNull();

	int64 Width();
	void SetWidth(int64 value);
	void SetWidthNull();
	bool WidthIsNull();

	int64 Length();
	void SetLength(int64 value);
	void SetLengthNull();
	bool LengthIsNull();

	BString* Code();
	void SetCode(BString* value);
	void SetCodeNull();
	bool CodeIsNull();

	int64 Height();
	void SetHeight(int64 value);
	void SetHeightNull();
	bool HeightIsNull();

private:
	// NULL represents a JSON null / absent value for each field.
	int64* fOrdering;
	int64* fWidth;
	int64* fLength;
	BString* fCode;
	int64* fHeight;
};
#endif // GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGSCREENSHOT_H

View File

@ -1,380 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.117543
*/
#include "DumpExportPkgVersion.h"
// All properties start out as NULL, representing a JSON null / absent value.
DumpExportPkgVersion::DumpExportPkgVersion()
	:
	fMajor(NULL),
	fPayloadLength(NULL),
	fDescription(NULL),
	fTitle(NULL),
	fSummary(NULL),
	fMicro(NULL),
	fPreRelease(NULL),
	fArchitectureCode(NULL),
	fMinor(NULL),
	fRevision(NULL)
{
}
DumpExportPkgVersion::~DumpExportPkgVersion()
{
	// `delete` on a NULL pointer is a guaranteed no-op, so the explicit
	// NULL checks of the generated code are unnecessary.
	delete fMajor;
	delete fPayloadLength;
	delete fDescription;
	delete fTitle;
	delete fSummary;
	delete fMicro;
	delete fPreRelease;
	delete fArchitectureCode;
	delete fMinor;
	delete fRevision;
}
/*! The major version component; NULL while the property carries a JSON
    null.  The object takes ownership of any string handed to SetMajor().
*/
BString*
DumpExportPkgVersion::Major()
{
	return fMajor;
}


void
DumpExportPkgVersion::SetMajor(BString* value)
{
	fMajor = value;
}


void
DumpExportPkgVersion::SetMajorNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fMajor;
	fMajor = NULL;
}


bool
DumpExportPkgVersion::MajorIsNull()
{
	return fMajor == NULL;
}
/*! Returns the payload length.  Only call when PayloadLengthIsNull() is
    false; otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgVersion::PayloadLength()
{
	return *fPayloadLength;
}


void
DumpExportPkgVersion::SetPayloadLength(int64 value)
{
	// Fix: scalar `new` rather than `new int64[1]` so that the scalar
	// `delete` in the destructor and SetPayloadLengthNull() matches the
	// allocation (`new[]` requires `delete[]`).
	if (PayloadLengthIsNull())
		fPayloadLength = new int64;
	*fPayloadLength = value;
}


void
DumpExportPkgVersion::SetPayloadLengthNull()
{
	if (!PayloadLengthIsNull()) {
		delete fPayloadLength;
		fPayloadLength = NULL;
	}
}


bool
DumpExportPkgVersion::PayloadLengthIsNull()
{
	return fPayloadLength == NULL;
}
BString*
DumpExportPkgVersion::Description()
{
return fDescription;
}
void
DumpExportPkgVersion::SetDescription(BString* value)
{
fDescription = value;
}
void
DumpExportPkgVersion::SetDescriptionNull()
{
if (!DescriptionIsNull()) {
delete fDescription;
fDescription = NULL;
}
}
bool
DumpExportPkgVersion::DescriptionIsNull()
{
return fDescription == NULL;
}
BString*
DumpExportPkgVersion::Title()
{
return fTitle;
}
void
DumpExportPkgVersion::SetTitle(BString* value)
{
fTitle = value;
}
void
DumpExportPkgVersion::SetTitleNull()
{
if (!TitleIsNull()) {
delete fTitle;
fTitle = NULL;
}
}
bool
DumpExportPkgVersion::TitleIsNull()
{
return fTitle == NULL;
}
BString*
DumpExportPkgVersion::Summary()
{
return fSummary;
}
void
DumpExportPkgVersion::SetSummary(BString* value)
{
fSummary = value;
}
void
DumpExportPkgVersion::SetSummaryNull()
{
if (!SummaryIsNull()) {
delete fSummary;
fSummary = NULL;
}
}
bool
DumpExportPkgVersion::SummaryIsNull()
{
return fSummary == NULL;
}
BString*
DumpExportPkgVersion::Micro()
{
return fMicro;
}
void
DumpExportPkgVersion::SetMicro(BString* value)
{
fMicro = value;
}
void
DumpExportPkgVersion::SetMicroNull()
{
if (!MicroIsNull()) {
delete fMicro;
fMicro = NULL;
}
}
bool
DumpExportPkgVersion::MicroIsNull()
{
return fMicro == NULL;
}
BString*
DumpExportPkgVersion::PreRelease()
{
return fPreRelease;
}
void
DumpExportPkgVersion::SetPreRelease(BString* value)
{
fPreRelease = value;
}
void
DumpExportPkgVersion::SetPreReleaseNull()
{
if (!PreReleaseIsNull()) {
delete fPreRelease;
fPreRelease = NULL;
}
}
bool
DumpExportPkgVersion::PreReleaseIsNull()
{
return fPreRelease == NULL;
}
BString*
DumpExportPkgVersion::ArchitectureCode()
{
return fArchitectureCode;
}
void
DumpExportPkgVersion::SetArchitectureCode(BString* value)
{
fArchitectureCode = value;
}
void
DumpExportPkgVersion::SetArchitectureCodeNull()
{
if (!ArchitectureCodeIsNull()) {
delete fArchitectureCode;
fArchitectureCode = NULL;
}
}
bool
DumpExportPkgVersion::ArchitectureCodeIsNull()
{
return fArchitectureCode == NULL;
}
BString*
DumpExportPkgVersion::Minor()
{
return fMinor;
}
void
DumpExportPkgVersion::SetMinor(BString* value)
{
fMinor = value;
}
void
DumpExportPkgVersion::SetMinorNull()
{
if (!MinorIsNull()) {
delete fMinor;
fMinor = NULL;
}
}
bool
DumpExportPkgVersion::MinorIsNull()
{
return fMinor == NULL;
}
/*! Returns the revision.  Only call when RevisionIsNull() is false;
    otherwise a NULL pointer is dereferenced.
*/
int64
DumpExportPkgVersion::Revision()
{
	return *fRevision;
}


void
DumpExportPkgVersion::SetRevision(int64 value)
{
	// Fix: scalar `new` rather than `new int64[1]` so that the scalar
	// `delete` in the destructor and SetRevisionNull() matches the
	// allocation (`new[]` requires `delete[]`).
	if (RevisionIsNull())
		fRevision = new int64;
	*fRevision = value;
}


void
DumpExportPkgVersion::SetRevisionNull()
{
	if (!RevisionIsNull()) {
		delete fRevision;
		fRevision = NULL;
	}
}


bool
DumpExportPkgVersion::RevisionIsNull()
{
	return fRevision == NULL;
}

View File

@ -1,84 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:17.117333
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGVERSION_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGVERSION_H
#include "List.h"
#include "String.h"
class DumpExportPkgVersion {
public:
DumpExportPkgVersion();
virtual ~DumpExportPkgVersion();
BString* Major();
void SetMajor(BString* value);
void SetMajorNull();
bool MajorIsNull();
int64 PayloadLength();
void SetPayloadLength(int64 value);
void SetPayloadLengthNull();
bool PayloadLengthIsNull();
BString* Description();
void SetDescription(BString* value);
void SetDescriptionNull();
bool DescriptionIsNull();
BString* Title();
void SetTitle(BString* value);
void SetTitleNull();
bool TitleIsNull();
BString* Summary();
void SetSummary(BString* value);
void SetSummaryNull();
bool SummaryIsNull();
BString* Micro();
void SetMicro(BString* value);
void SetMicroNull();
bool MicroIsNull();
BString* PreRelease();
void SetPreRelease(BString* value);
void SetPreReleaseNull();
bool PreReleaseIsNull();
BString* ArchitectureCode();
void SetArchitectureCode(BString* value);
void SetArchitectureCodeNull();
bool ArchitectureCodeIsNull();
BString* Minor();
void SetMinor(BString* value);
void SetMinorNull();
bool MinorIsNull();
int64 Revision();
void SetRevision(int64 value);
void SetRevisionNull();
bool RevisionIsNull();
private:
BString* fMajor;
int64* fPayloadLength;
BString* fDescription;
BString* fTitle;
BString* fSummary;
BString* fMicro;
BString* fPreRelease;
BString* fArchitectureCode;
BString* fMinor;
int64* fRevision;
};
#endif // GEN_JSON_SCHEMA_MODEL__DUMPEXPORTPKGVERSION_H

View File

@ -1,209 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:33.021497
*/
#include "DumpExportRepository.h"
// All properties start out as NULL, representing a JSON null / absent value.
DumpExportRepository::DumpExportRepository()
	:
	fInformationUrl(NULL),
	fCode(NULL),
	fRepositorySources(NULL),
	fName(NULL),
	fDescription(NULL)
{
}
DumpExportRepository::~DumpExportRepository()
{
	// deleting NULL is a no-op, so the scalar members need no checks
	delete fInformationUrl;
	delete fCode;
	if (fRepositorySources != NULL) {
		// the repository owns the listed sources; dispose of each item
		// before the container itself
		for (int32 i = fRepositorySources->CountItems() - 1; i >= 0; i--)
			delete fRepositorySources->ItemAt(i);
		delete fRepositorySources;
	}
	delete fName;
	delete fDescription;
}
/*! The information URL; NULL while the property carries a JSON null.  The
    object takes ownership of any string handed to SetInformationUrl().
*/
BString*
DumpExportRepository::InformationUrl()
{
	return fInformationUrl;
}


void
DumpExportRepository::SetInformationUrl(BString* value)
{
	fInformationUrl = value;
}


void
DumpExportRepository::SetInformationUrlNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fInformationUrl;
	fInformationUrl = NULL;
}


bool
DumpExportRepository::InformationUrlIsNull()
{
	return fInformationUrl == NULL;
}
/*! The repository code; NULL while the property carries a JSON null.  The
    object takes ownership of any string handed to SetCode().
*/
BString*
DumpExportRepository::Code()
{
	return fCode;
}


void
DumpExportRepository::SetCode(BString* value)
{
	fCode = value;
}


void
DumpExportRepository::SetCodeNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fCode;
	fCode = NULL;
}


bool
DumpExportRepository::CodeIsNull()
{
	return fCode == NULL;
}
/*! Appends `value` to the repository-source list, lazily creating the list
    on first use.  The list takes ownership of the supplied object.
*/
void
DumpExportRepository::AddToRepositorySources(DumpExportRepositorySource* value)
{
	if (fRepositorySources == NULL)
		fRepositorySources = new List<DumpExportRepositorySource*, true>();
	fRepositorySources->Add(value);
}


// NOTE(review): like the other setters, this does not release a previously
// assigned list - presumably the caller guarantees it is only set once.
void
DumpExportRepository::SetRepositorySources(List<DumpExportRepositorySource*, true>* value)
{
	fRepositorySources = value;
}


int32
DumpExportRepository::CountRepositorySources()
{
	return fRepositorySources == NULL ? 0 : fRepositorySources->CountItems();
}


/*! Only call with a valid index; callers are expected to consult
    CountRepositorySources() first since a NULL list is not guarded here.
*/
DumpExportRepositorySource*
DumpExportRepository::RepositorySourcesItemAt(int32 index)
{
	return fRepositorySources->ItemAt(index);
}


bool
DumpExportRepository::RepositorySourcesIsNull()
{
	return fRepositorySources == NULL;
}
/*! The repository name; NULL while the property carries a JSON null.  The
    object takes ownership of any string handed to SetName().
*/
BString*
DumpExportRepository::Name()
{
	return fName;
}


void
DumpExportRepository::SetName(BString* value)
{
	fName = value;
}


void
DumpExportRepository::SetNameNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fName;
	fName = NULL;
}


bool
DumpExportRepository::NameIsNull()
{
	return fName == NULL;
}
/*! The repository description; NULL while the property carries a JSON null.
    The object takes ownership of any string handed to SetDescription().
*/
BString*
DumpExportRepository::Description()
{
	return fDescription;
}


void
DumpExportRepository::SetDescription(BString* value)
{
	fDescription = value;
}


void
DumpExportRepository::SetDescriptionNull()
{
	// deleting a NULL pointer is harmless, so no prior check is needed
	delete fDescription;
	fDescription = NULL;
}


bool
DumpExportRepository::DescriptionIsNull()
{
	return fDescription == NULL;
}

View File

@ -1,54 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:33.020747
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORY_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORY_H
#include "List.h"
#include "String.h"
#include "DumpExportRepositorySource.h"
/*! Generated model object describing a software repository in the server's
    JSON "dump export" format.  String properties are stored as owned
    pointers so that a JSON null can be represented by NULL; the repository
    also owns the list of repository sources and its items.
*/
class DumpExportRepository {
public:
	DumpExportRepository();
	virtual ~DumpExportRepository();

	BString* InformationUrl();
	void SetInformationUrl(BString* value);
	void SetInformationUrlNull();
	bool InformationUrlIsNull();

	BString* Code();
	void SetCode(BString* value);
	void SetCodeNull();
	bool CodeIsNull();

	// List accessors; AddToRepositorySources() lazily creates the list.
	// RepositorySourcesItemAt() is not NULL-guarded - check
	// CountRepositorySources() first.
	void AddToRepositorySources(DumpExportRepositorySource* value);
	void SetRepositorySources(List<DumpExportRepositorySource*, true>* value);
	int32 CountRepositorySources();
	DumpExportRepositorySource* RepositorySourcesItemAt(int32 index);
	bool RepositorySourcesIsNull();

	BString* Name();
	void SetName(BString* value);
	void SetNameNull();
	bool NameIsNull();

	BString* Description();
	void SetDescription(BString* value);
	void SetDescriptionNull();
	bool DescriptionIsNull();

private:
	// NULL represents a JSON null / absent value for each field.
	BString* fInformationUrl;
	BString* fCode;
	List <DumpExportRepositorySource*, true>* fRepositorySources;
	BString* fName;
	BString* fDescription;
};
#endif // GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORY_H

View File

@ -1,917 +0,0 @@
/*
* Generated Listener Object
* source json-schema : dumpexport.json
* generated at : 2017-12-18T23:07:02.401765
*/
#include "DumpExportRepositoryJsonListener.h"
#include "List.h"
#include <stdio.h>
// #pragma mark - private interfaces for the stacked listeners
/*! This class is the top level of the stacked listeners. The stack structure
is maintained in a linked list and sub-classes implement specific behaviors
depending where in the parse tree the stacked listener is working at.
*/
/*! Base of all stacked listeners; keeps a link to its parent (forming the
    stack) and to the main listener through which errors, completion and the
    active-listener switch are routed.
*/
class AbstractStackedDumpExportRepositoryJsonListener : public BJsonEventListener {
public:
	AbstractStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~AbstractStackedDumpExportRepositoryJsonListener();

	void HandleError(status_t status, int32 line, const char* message);
	void Complete();
	status_t ErrorStatus();
	AbstractStackedDumpExportRepositoryJsonListener* Parent();

	// Hook invoked just before this listener is popped off the stack.
	virtual bool WillPop();

protected:
	AbstractMainDumpExportRepositoryJsonListener* fMainListener;

	// Pop() re-activates the parent listener (after calling WillPop());
	// Push() makes `stackedListener` the active listener.
	bool Pop();
	void Push(AbstractStackedDumpExportRepositoryJsonListener* stackedListener);

private:
	AbstractStackedDumpExportRepositoryJsonListener* fParent;
};


/*! Skips over an uninteresting JSON array, tracking nesting so the matching
    end-of-array event is found.
*/
class GeneralArrayStackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	GeneralArrayStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~GeneralArrayStackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
};


/*! Skips over an uninteresting JSON object, tracking nesting so the matching
    end-of-object event is found.
*/
class GeneralObjectStackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	GeneralObjectStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~GeneralObjectStackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
};


/*! Populates one DumpExportRepository instance from the events of a JSON
    object.
*/
class DumpExportRepository_StackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	DumpExportRepository_StackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~DumpExportRepository_StackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
	DumpExportRepository* Target();

protected:
	DumpExportRepository* fTarget;
	// name of the JSON property whose value event comes next
	BString fNextItemName;
};


/*! Accumulates a list of DumpExportRepository objects from a JSON array. */
class DumpExportRepository_List_StackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	DumpExportRepository_List_StackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~DumpExportRepository_List_StackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
	List<DumpExportRepository*, true>* Target(); // list of DumpExportRepository pointers
private:
	List<DumpExportRepository*, true>* fTarget;
};


/*! Populates one DumpExportRepositorySource instance from the events of a
    JSON object.
*/
class DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
	DumpExportRepositorySource* Target();

protected:
	DumpExportRepositorySource* fTarget;
	// name of the JSON property whose value event comes next
	BString fNextItemName;
};


/*! Accumulates a list of DumpExportRepositorySource objects from a JSON
    array.
*/
class DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent);
	~DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
	List<DumpExportRepositorySource*, true>* Target(); // list of DumpExportRepositorySource pointers
private:
	List<DumpExportRepositorySource*, true>* fTarget;
};


/*! Parses one repository like its superclass, but instead of retaining the
    object it hands it to the item listener when done (see WillPop()).
*/
class ItemEmittingStackedDumpExportRepositoryJsonListener : public DumpExportRepository_StackedDumpExportRepositoryJsonListener {
public:
	ItemEmittingStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent,
		DumpExportRepositoryListener* itemListener);
	~ItemEmittingStackedDumpExportRepositoryJsonListener();

	bool WillPop();

private:
	DumpExportRepositoryListener* fItemListener;
};


/*! Handles the top-level object of the "bulk container" format, descending
    into its "items" array.
*/
class BulkContainerStackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	BulkContainerStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent,
		DumpExportRepositoryListener* itemListener);
	~BulkContainerStackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);

private:
	BString fNextItemName;
	DumpExportRepositoryListener* fItemListener;
};


/*! Handles the "items" array of the bulk container; spawns one item-emitting
    listener per contained object.
*/
class BulkContainerItemsStackedDumpExportRepositoryJsonListener : public AbstractStackedDumpExportRepositoryJsonListener {
public:
	BulkContainerItemsStackedDumpExportRepositoryJsonListener(
		AbstractMainDumpExportRepositoryJsonListener* mainListener,
		AbstractStackedDumpExportRepositoryJsonListener* parent,
		DumpExportRepositoryListener* itemListener);
	~BulkContainerItemsStackedDumpExportRepositoryJsonListener();

	bool Handle(const BJsonEvent& event);
	bool WillPop();

private:
	DumpExportRepositoryListener* fItemListener;
};
// #pragma mark - implementations for the stacked listeners
AbstractStackedDumpExportRepositoryJsonListener::AbstractStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	fMainListener(mainListener),
	fParent(parent)
{
}


AbstractStackedDumpExportRepositoryJsonListener::~AbstractStackedDumpExportRepositoryJsonListener()
{
}
// Error / completion handling is funnelled through the main listener so that
// all of the stacked listeners share a single error state.

void
AbstractStackedDumpExportRepositoryJsonListener::HandleError(status_t status,
	int32 line, const char* message)
{
	fMainListener->HandleError(status, line, message);
}


void
AbstractStackedDumpExportRepositoryJsonListener::Complete()
{
	fMainListener->Complete();
}


status_t
AbstractStackedDumpExportRepositoryJsonListener::ErrorStatus()
{
	return fMainListener->ErrorStatus();
}


AbstractStackedDumpExportRepositoryJsonListener*
AbstractStackedDumpExportRepositoryJsonListener::Parent()
{
	return fParent;
}


// Makes the supplied listener the active one; it will receive the following
// JSON events until it pops itself.
void
AbstractStackedDumpExportRepositoryJsonListener::Push(
	AbstractStackedDumpExportRepositoryJsonListener* stackedListener)
{
	fMainListener->SetStackedListener(stackedListener);
}


bool
AbstractStackedDumpExportRepositoryJsonListener::WillPop()
{
	return true;
}


// Re-activates the parent listener, giving the subclass a chance to react
// first via WillPop(); returns WillPop()'s result.
bool
AbstractStackedDumpExportRepositoryJsonListener::Pop()
{
	bool willPopResult = WillPop();
	fMainListener->SetStackedListener(fParent);
	return willPopResult;
}
GeneralObjectStackedDumpExportRepositoryJsonListener::GeneralObjectStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent)
{
}


GeneralObjectStackedDumpExportRepositoryJsonListener::~GeneralObjectStackedDumpExportRepositoryJsonListener()
{
}


/*! Consumes and discards every event of an uninteresting JSON object.
    Nested objects / arrays push further "general" listeners so that the
    matching end-of-object is found; at the end the listener pops itself and
    self-destructs (no member access may follow the `delete this`).
*/
bool
GeneralObjectStackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_OBJECT_NAME:
		case B_JSON_NUMBER:
		case B_JSON_STRING:
		case B_JSON_TRUE:
		case B_JSON_FALSE:
		case B_JSON_NULL:
			// ignore
			break;
		case B_JSON_OBJECT_START:
			Push(new GeneralObjectStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_ARRAY_START:
			Push(new GeneralArrayStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_ARRAY_END:
			// an array cannot end inside an object scope
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected end of array");
			break;
		case B_JSON_OBJECT_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
	}
	return ErrorStatus() == B_OK;
}
GeneralArrayStackedDumpExportRepositoryJsonListener::GeneralArrayStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent)
{
}


GeneralArrayStackedDumpExportRepositoryJsonListener::~GeneralArrayStackedDumpExportRepositoryJsonListener()
{
}


/*! Consumes and discards every event of an uninteresting JSON array.
    Nested objects / arrays push further "general" listeners so that the
    matching end-of-array is found; at the end the listener pops itself and
    self-destructs (no member access may follow the `delete this`).
*/
bool
GeneralArrayStackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_OBJECT_NAME:
		case B_JSON_NUMBER:
		case B_JSON_STRING:
		case B_JSON_TRUE:
		case B_JSON_FALSE:
		case B_JSON_NULL:
			// ignore
			break;
		case B_JSON_OBJECT_START:
			Push(new GeneralObjectStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_ARRAY_START:
			Push(new GeneralArrayStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_OBJECT_END:
			// an object cannot end inside an array scope
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected end of object");
			break;
		case B_JSON_ARRAY_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
	}
	return ErrorStatus() == B_OK;
}
DumpExportRepository_StackedDumpExportRepositoryJsonListener::DumpExportRepository_StackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fTarget(new DumpExportRepository())
{
}


DumpExportRepository_StackedDumpExportRepositoryJsonListener::~DumpExportRepository_StackedDumpExportRepositoryJsonListener()
{
}


// The repository instance being populated by this listener.
DumpExportRepository*
DumpExportRepository_StackedDumpExportRepositoryJsonListener::Target()
{
	return fTarget;
}
/*! Populates fTarget from the events of one JSON object.  fNextItemName
    carries the most recent property name so the following value event can be
    routed to the right setter; it is cleared after every value.  At the
    object's end the listener pops itself and self-destructs, so no member
    may be touched after the `delete this`.
*/
bool
DumpExportRepository_StackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_ARRAY_END:
			// NOTE(review): the message says "start of array" although the
			// event is an array end.
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected start of array");
			break;
		case B_JSON_OBJECT_NAME:
			fNextItemName = event.Content();
			break;
		case B_JSON_OBJECT_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
		case B_JSON_STRING:
			// route the string to the matching property setter; the target
			// takes ownership of the new BString
			if (fNextItemName == "informationUrl")
				fTarget->SetInformationUrl(new BString(event.Content()));
			if (fNextItemName == "code")
				fTarget->SetCode(new BString(event.Content()));
			if (fNextItemName == "name")
				fTarget->SetName(new BString(event.Content()));
			if (fNextItemName == "description")
				fTarget->SetDescription(new BString(event.Content()));
			fNextItemName.SetTo("");
			break;
		case B_JSON_TRUE:
			// no boolean properties in this schema; discard
			fNextItemName.SetTo("");
			break;
		case B_JSON_FALSE:
			fNextItemName.SetTo("");
			break;
		case B_JSON_NULL:
		{
			if (fNextItemName == "informationUrl")
				fTarget->SetInformationUrlNull();
			if (fNextItemName == "code")
				fTarget->SetCodeNull();
			if (fNextItemName == "name")
				fTarget->SetNameNull();
			if (fNextItemName == "description")
				fTarget->SetDescriptionNull();
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_NUMBER:
		{
			// no numeric properties in this schema; discard
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_OBJECT_START:
		{
			// no object-valued properties in this schema; skip the object
			// (the `1 == 1` is a generator artifact for "always")
			if (1 == 1) {
				GeneralObjectStackedDumpExportRepositoryJsonListener* nextListener = new GeneralObjectStackedDumpExportRepositoryJsonListener(fMainListener, this);
				Push(nextListener);
			}
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_ARRAY_START:
		{
			// "repositorySources" is collected into a list that the target
			// owns; any other array is skipped
			if (fNextItemName == "repositorySources") {
				DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener* nextListener = new DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener(fMainListener, this);
				fTarget->SetRepositorySources(nextListener->Target());
				Push(nextListener);
			}
			else if (1 == 1) {
				AbstractStackedDumpExportRepositoryJsonListener* nextListener = new GeneralArrayStackedDumpExportRepositoryJsonListener(fMainListener, this);
				Push(nextListener);
			}
			fNextItemName.SetTo("");
			break;
		}
	}
	return ErrorStatus() == B_OK;
}
DumpExportRepository_List_StackedDumpExportRepositoryJsonListener::DumpExportRepository_List_StackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fTarget(new List<DumpExportRepository*, true>())
{
}


DumpExportRepository_List_StackedDumpExportRepositoryJsonListener::~DumpExportRepository_List_StackedDumpExportRepositoryJsonListener()
{
}


// The list of repositories accumulated so far.
List<DumpExportRepository*, true>*
DumpExportRepository_List_StackedDumpExportRepositoryJsonListener::Target()
{
	return fTarget;
}
/*! Handles the events of a JSON array of repository objects: every object
    start spawns an item listener whose target is added to the list before
    the item is parsed.  At the array's end the listener pops itself and
    self-destructs (no member access may follow the `delete this`).
*/
bool
DumpExportRepository_List_StackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_ARRAY_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
		case B_JSON_OBJECT_START:
		{
			DumpExportRepository_StackedDumpExportRepositoryJsonListener* nextListener =
				new DumpExportRepository_StackedDumpExportRepositoryJsonListener(fMainListener, this);
			fTarget->Add(nextListener->Target());
			Push(nextListener);
			break;
		}
		default:
			// only objects may appear directly inside this array
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
				"illegal state - unexpected json event parsing an array of DumpExportRepository");
			break;
	}
	return ErrorStatus() == B_OK;
}
DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener::DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fTarget(new DumpExportRepositorySource())
{
}


DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener::~DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener()
{
}


// The repository-source instance being populated by this listener.
DumpExportRepositorySource*
DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener::Target()
{
	return fTarget;
}
/*! Populates fTarget (a DumpExportRepositorySource) from the events of one
    JSON object; fNextItemName carries the pending property name.  At the
    object's end the listener pops itself and self-destructs, so no member
    may be touched after the `delete this`.
*/
bool
DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_ARRAY_END:
			// NOTE(review): the message says "start of array" although the
			// event is an array end.
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected start of array");
			break;
		case B_JSON_OBJECT_NAME:
			fNextItemName = event.Content();
			break;
		case B_JSON_OBJECT_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
		case B_JSON_STRING:
			// route the string to the matching property setter; the target
			// takes ownership of the new BString
			if (fNextItemName == "url")
				fTarget->SetUrl(new BString(event.Content()));
			if (fNextItemName == "repoInfoUrl")
				fTarget->SetRepoInfoUrl(new BString(event.Content()));
			if (fNextItemName == "code")
				fTarget->SetCode(new BString(event.Content()));
			fNextItemName.SetTo("");
			break;
		case B_JSON_TRUE:
			// no boolean properties in this schema; discard
			fNextItemName.SetTo("");
			break;
		case B_JSON_FALSE:
			fNextItemName.SetTo("");
			break;
		case B_JSON_NULL:
		{
			if (fNextItemName == "url")
				fTarget->SetUrlNull();
			if (fNextItemName == "repoInfoUrl")
				fTarget->SetRepoInfoUrlNull();
			if (fNextItemName == "code")
				fTarget->SetCodeNull();
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_NUMBER:
		{
			// no numeric properties in this schema; discard
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_OBJECT_START:
		{
			// no object-valued properties in this schema; skip the object
			// (the `1 == 1` is a generator artifact for "always")
			if (1 == 1) {
				GeneralObjectStackedDumpExportRepositoryJsonListener* nextListener = new GeneralObjectStackedDumpExportRepositoryJsonListener(fMainListener, this);
				Push(nextListener);
			}
			fNextItemName.SetTo("");
			break;
		}
		case B_JSON_ARRAY_START:
		{
			// no array-valued properties in this schema; skip the array
			if (1 == 1) {
				AbstractStackedDumpExportRepositoryJsonListener* nextListener = new GeneralArrayStackedDumpExportRepositoryJsonListener(fMainListener, this);
				Push(nextListener);
			}
			fNextItemName.SetTo("");
			break;
		}
	}
	return ErrorStatus() == B_OK;
}
DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener::DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fTarget(new List<DumpExportRepositorySource*, true>())
{
}


DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener::~DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener()
{
}


// The list of repository sources accumulated so far.
List<DumpExportRepositorySource*, true>*
DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener::Target()
{
	return fTarget;
}
/*! Handles the events of a JSON array of repository-source objects: every
    object start spawns an item listener whose target is added to the list
    before the item is parsed.  At the array's end the listener pops itself
    and self-destructs (no member access may follow the `delete this`).
*/
bool
DumpExportRepositorySource_List_StackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_ARRAY_END:
		{
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
		case B_JSON_OBJECT_START:
		{
			DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener* nextListener =
				new DumpExportRepositorySource_StackedDumpExportRepositoryJsonListener(fMainListener, this);
			fTarget->Add(nextListener->Target());
			Push(nextListener);
			break;
		}
		default:
			// only objects may appear directly inside this array
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
				"illegal state - unexpected json event parsing an array of DumpExportRepositorySource");
			break;
	}
	return ErrorStatus() == B_OK;
}
ItemEmittingStackedDumpExportRepositoryJsonListener::ItemEmittingStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener,
	AbstractStackedDumpExportRepositoryJsonListener* parent,
	DumpExportRepositoryListener* itemListener)
	:
	DumpExportRepository_StackedDumpExportRepositoryJsonListener(mainListener, parent),
	fItemListener(itemListener)
{
}


ItemEmittingStackedDumpExportRepositoryJsonListener::~ItemEmittingStackedDumpExportRepositoryJsonListener()
{
}


// Hands the freshly parsed repository to the client listener and then
// disposes of it - the client must not retain the pointer.
bool
ItemEmittingStackedDumpExportRepositoryJsonListener::WillPop()
{
	bool handleResult = fItemListener->Handle(fTarget);
	delete fTarget;
	fTarget = NULL;
	return handleResult;
}
// Listener for the top-level "bulk container" object; it routes the "items"
// array to an item-emitting listener and skips any other content.
BulkContainerStackedDumpExportRepositoryJsonListener::BulkContainerStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener, AbstractStackedDumpExportRepositoryJsonListener* parent,
	DumpExportRepositoryListener* itemListener)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fItemListener(itemListener)
{
}


BulkContainerStackedDumpExportRepositoryJsonListener::~BulkContainerStackedDumpExportRepositoryJsonListener()
{
}
// Consumes JSON events for the top-level bulk-container object.  The member
// named "items" is parsed as the array of repository items; every other
// member is skipped via the general object/array listeners.
bool
BulkContainerStackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	switch (event.EventType()) {
		case B_JSON_ARRAY_END:
			// An array-end with no matching array-start is malformed input.
			// (Fix: the message previously read "unexpected start of array",
			// which mis-described the B_JSON_ARRAY_END event.)
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected end of array");
			break;
		case B_JSON_OBJECT_NAME:
			// remember the member name so the next value event can be routed
			fNextItemName = event.Content();
			break;
		case B_JSON_OBJECT_START:
			// uninteresting nested object; consume and discard it
			Push(new GeneralObjectStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_ARRAY_START:
			if (fNextItemName == "items")
				Push(new BulkContainerItemsStackedDumpExportRepositoryJsonListener(fMainListener, this, fItemListener));
			else
				Push(new GeneralArrayStackedDumpExportRepositoryJsonListener(fMainListener, this));
			break;
		case B_JSON_OBJECT_END:
		{
			// container finished; capture status before `delete this`
			bool status = Pop() && (ErrorStatus() == B_OK);
			delete this;
			return status;
		}
		default:
			// scalar values of uninteresting members; ignore
			break;
	}
	return ErrorStatus() == B_OK;
}
// Listener for the "items" array within the bulk container; each element is
// parsed by an item-emitting listener and forwarded to the client.
BulkContainerItemsStackedDumpExportRepositoryJsonListener::BulkContainerItemsStackedDumpExportRepositoryJsonListener(
	AbstractMainDumpExportRepositoryJsonListener* mainListener, AbstractStackedDumpExportRepositoryJsonListener* parent,
	DumpExportRepositoryListener* itemListener)
	:
	AbstractStackedDumpExportRepositoryJsonListener(mainListener, parent),
	fItemListener(itemListener)
{
}


BulkContainerItemsStackedDumpExportRepositoryJsonListener::~BulkContainerItemsStackedDumpExportRepositoryJsonListener()
{
}
// Consumes JSON events inside the "items" array: each object-start spawns an
// item-emitting listener for one repository; the array-end ends this listener.
bool
BulkContainerItemsStackedDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
switch (event.EventType()) {
case B_JSON_OBJECT_START:
Push(new ItemEmittingStackedDumpExportRepositoryJsonListener(fMainListener, this, fItemListener));
break;
case B_JSON_ARRAY_END:
{
// Capture status BEFORE `delete this`; no members may be used after.
bool status = Pop() && (ErrorStatus() == B_OK);
delete this;
return status;
}
default:
// only whole objects are legal as direct elements of "items"
HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected json event");
break;
}
return ErrorStatus() == B_OK;
}
// Invoked as the items array closes: tell the client that the stream of
// items is complete.
bool
BulkContainerItemsStackedDumpExportRepositoryJsonListener::WillPop()
{
fItemListener->Complete();
return true;
}
// #pragma mark - implementations for the main listeners
// Base for the top-level (non-stacked) listeners; starts with no stacked
// delegate and no recorded error.
AbstractMainDumpExportRepositoryJsonListener::AbstractMainDumpExportRepositoryJsonListener()
	:
	fErrorStatus(B_OK),
	fStackedListener(NULL)
{
}


AbstractMainDumpExportRepositoryJsonListener::~AbstractMainDumpExportRepositoryJsonListener()
{
}
// Reports a parse problem on stderr and latches the error status so that
// subsequent Handle(...) calls can bail out early.  The `line` argument is
// accepted for interface compatibility but is not included in the output.
void
AbstractMainDumpExportRepositoryJsonListener::HandleError(status_t status, int32 line, const char* message)
{
	if (message == NULL) {
		fprintf(stderr, "an error has arisen processing json for 'DumpExportRepository'\n");
	} else {
		fprintf(stderr, "an error has arisen processing json for 'DumpExportRepository'; %s\n", message);
	}
	fErrorStatus = status;
}
// Called when the whole JSON payload has been processed; no action required
// at this level.
void
AbstractMainDumpExportRepositoryJsonListener::Complete()
{
}
// Returns B_OK until HandleError(...) has recorded a failure.
status_t
AbstractMainDumpExportRepositoryJsonListener::ErrorStatus()
{
return fErrorStatus;
}
// Installs the stacked listener that subsequent events are delegated to;
// pass NULL to resume top-level handling.
void
AbstractMainDumpExportRepositoryJsonListener::SetStackedListener(
AbstractStackedDumpExportRepositoryJsonListener* stackedListener)
{
fStackedListener = stackedListener;
}
// Listener that parses exactly one top-level DumpExportRepository instance.
SingleDumpExportRepositoryJsonListener::SingleDumpExportRepositoryJsonListener()
	:
	AbstractMainDumpExportRepositoryJsonListener(),
	fTarget(NULL)
{
}


SingleDumpExportRepositoryJsonListener::~SingleDumpExportRepositoryJsonListener()
{
}
// Top-level event handler: bails out once an error is latched, delegates to
// the stacked listener while one is installed, and otherwise only accepts
// the opening of the single expected repository object.
bool
SingleDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
if (fErrorStatus != B_OK)
return false;
if (fStackedListener != NULL)
return fStackedListener->Handle(event);
switch (event.EventType()) {
case B_JSON_OBJECT_START:
{
// Start of the one repository object; remember the object the child
// listener will populate and delegate further events to it.
DumpExportRepository_StackedDumpExportRepositoryJsonListener* nextListener = new DumpExportRepository_StackedDumpExportRepositoryJsonListener(
this, NULL);
fTarget = nextListener->Target();
SetStackedListener(nextListener);
break;
}
default:
HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
"illegal state - unexpected json event parsing top level for DumpExportRepository");
break;
}
return ErrorStatus() == B_OK;
}
// Returns the parsed repository (NULL until an object has been started);
// ownership is not transferred.
DumpExportRepository*
SingleDumpExportRepositoryJsonListener::Target()
{
return fTarget;
}
// Listener for the "bulk container" format; each parsed repository is
// delivered to the supplied item listener rather than being retained.
BulkContainerDumpExportRepositoryJsonListener::BulkContainerDumpExportRepositoryJsonListener(
	DumpExportRepositoryListener* itemListener)
	:
	AbstractMainDumpExportRepositoryJsonListener(),
	fItemListener(itemListener)
{
}


BulkContainerDumpExportRepositoryJsonListener::~BulkContainerDumpExportRepositoryJsonListener()
{
}
// Top-level event handler for the bulk-container format: bails out once an
// error is latched, delegates to the stacked listener while one is
// installed, and otherwise only accepts the opening of the container object.
bool
BulkContainerDumpExportRepositoryJsonListener::Handle(const BJsonEvent& event)
{
	if (fErrorStatus != B_OK)
		return false;
	if (fStackedListener != NULL)
		return fStackedListener->Handle(event);
	switch (event.EventType()) {
		case B_JSON_OBJECT_START:
		{
			// the container object begins; delegate all further events to a
			// stacked listener wired up with the client's item listener
			BulkContainerStackedDumpExportRepositoryJsonListener* nextListener =
				new BulkContainerStackedDumpExportRepositoryJsonListener(
					this, NULL, fItemListener);
			SetStackedListener(nextListener);
			return true;
			// (fix: removed unreachable `break;` that followed the return)
		}
		default:
			HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
				"illegal state - unexpected json event parsing top level for BulkContainerDumpExportRepositoryJsonListener");
			break;
	}
	return ErrorStatus() == B_OK;
}

View File

@ -1,89 +0,0 @@
/*
* Generated Listener Object
* source json-schema : dumpexport.json
* generated at : 2017-12-18T23:07:02.399681
*/
#ifndef GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTREPOSITORYJSONLISTENER_H
#define GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTREPOSITORYJSONLISTENER_H
#include <JsonEventListener.h>
#include "DumpExportRepository.h"
class AbstractStackedDumpExportRepositoryJsonListener;
// Base class for the top-level JSON event listeners for DumpExportRepository
// data.  It records a latched error status and optionally delegates events
// to a "stacked" listener that handles the current nesting level.
class AbstractMainDumpExportRepositoryJsonListener : public BJsonEventListener {
friend class AbstractStackedDumpExportRepositoryJsonListener;
public:
AbstractMainDumpExportRepositoryJsonListener();
virtual ~AbstractMainDumpExportRepositoryJsonListener();
// Reports the problem and latches the error status.
void HandleError(status_t status, int32 line, const char* message);
void Complete();
// B_OK until an error has been recorded.
status_t ErrorStatus();
protected:
// Installs the delegate for subsequent events; NULL resumes top level.
void SetStackedListener(
AbstractStackedDumpExportRepositoryJsonListener* listener);
status_t fErrorStatus;
AbstractStackedDumpExportRepositoryJsonListener* fStackedListener;
};
/*! Use this listener when you want to parse some JSON data that contains
just a single instance of DumpExportRepository.
*/
// Parses one top-level DumpExportRepository instance; fetch it with Target()
// after a successful parse.
class SingleDumpExportRepositoryJsonListener
: public AbstractMainDumpExportRepositoryJsonListener {
friend class AbstractStackedDumpExportRepositoryJsonListener;
public:
SingleDumpExportRepositoryJsonListener();
virtual ~SingleDumpExportRepositoryJsonListener();
bool Handle(const BJsonEvent& event);
// NULL until the repository object has been encountered in the stream.
DumpExportRepository* Target();
private:
DumpExportRepository* fTarget;
};
/*! Concrete sub-classes of this class are able to respond to each
DumpExportRepository* instance as
it is parsed from the bulk container. When the stream is
finished, the Complete() method is invoked.
Note that the item object will be deleted after the Handle method
is invoked. The Handle method need not take responsibility
for deleting the item itself.
*/
// Callback interface for consuming repositories as they stream out of a bulk
// container parse.  Handle() receives each item (the caller deletes it
// afterwards — do not retain the pointer); Complete() signals end of stream.
class DumpExportRepositoryListener {
public:
	// virtual destructor so concrete listeners can be destroyed safely
	// through a pointer to this interface
	virtual ~DumpExportRepositoryListener() {}

	virtual bool Handle(DumpExportRepository* item) = 0;
	virtual void Complete() = 0;
};
/*! Use this listener, together with an instance of a concrete
subclass of DumpExportRepositoryListener
in order to parse the JSON data in a specific "bulk
container" format. Each time that an instance of
DumpExportRepository
is parsed, the instance item listener will be invoked.
*/
// Parses the "bulk container" JSON format, invoking the supplied item
// listener once for every DumpExportRepository encountered in "items".
class BulkContainerDumpExportRepositoryJsonListener
: public AbstractMainDumpExportRepositoryJsonListener {
friend class AbstractStackedDumpExportRepositoryJsonListener;
public:
BulkContainerDumpExportRepositoryJsonListener(
DumpExportRepositoryListener* itemListener);
// implicitly virtual via the base class' virtual destructor
~BulkContainerDumpExportRepositoryJsonListener();
bool Handle(const BJsonEvent& event);
private:
DumpExportRepositoryListener* fItemListener;
};
#endif // GEN_JSON_SCHEMA_PARSER__SINGLEDUMPEXPORTREPOSITORYJSONLISTENER_H

View File

@ -1,124 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:33.022114
*/
#include "DumpExportRepositorySource.h"
// Generated DTO for one repository source; all string fields start out NULL
// ("null" in the JSON sense) and are owned by this object once set.
DumpExportRepositorySource::DumpExportRepositorySource()
	:
	fUrl(NULL),
	fRepoInfoUrl(NULL),
	fCode(NULL)
{
}


DumpExportRepositorySource::~DumpExportRepositorySource()
{
	// `delete` on a NULL pointer is a no-op, so no null-checks are needed
	delete fUrl;
	delete fRepoInfoUrl;
	delete fCode;
}
// Accessors for the nullable "url" field.  The setter takes ownership of
// the supplied string; note it does not free any previously-set value.
BString*
DumpExportRepositorySource::Url()
{
	return fUrl;
}


void
DumpExportRepositorySource::SetUrl(BString* value)
{
	fUrl = value;
}


void
DumpExportRepositorySource::SetUrlNull()
{
	// deleting NULL is a no-op, so the previous null-check was redundant
	delete fUrl;
	fUrl = NULL;
}


bool
DumpExportRepositorySource::UrlIsNull()
{
	return fUrl == NULL;
}
// Accessors for the nullable "repoInfoUrl" field.  The setter takes
// ownership of the supplied string; it does not free a prior value.
BString*
DumpExportRepositorySource::RepoInfoUrl()
{
	return fRepoInfoUrl;
}


void
DumpExportRepositorySource::SetRepoInfoUrl(BString* value)
{
	fRepoInfoUrl = value;
}


void
DumpExportRepositorySource::SetRepoInfoUrlNull()
{
	// deleting NULL is a no-op, so the previous null-check was redundant
	delete fRepoInfoUrl;
	fRepoInfoUrl = NULL;
}


bool
DumpExportRepositorySource::RepoInfoUrlIsNull()
{
	return fRepoInfoUrl == NULL;
}
// Accessors for the nullable "code" field.  The setter takes ownership of
// the supplied string; it does not free a prior value.
BString*
DumpExportRepositorySource::Code()
{
	return fCode;
}


void
DumpExportRepositorySource::SetCode(BString* value)
{
	fCode = value;
}


void
DumpExportRepositorySource::SetCodeNull()
{
	// deleting NULL is a no-op, so the previous null-check was redundant
	delete fCode;
	fCode = NULL;
}


bool
DumpExportRepositorySource::CodeIsNull()
{
	return fCode == NULL;
}

View File

@ -1,40 +0,0 @@
/*
* Generated Model Object
* source json-schema : dumpexport.json
* generated at : 2017-12-07T23:22:33.021952
*/
#ifndef GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORYSOURCE_H
#define GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORYSOURCE_H
#include "List.h"
#include "String.h"
// Generated data-transfer-object for one repository source parsed from the
// server's dump-export JSON.  Each field is nullable: the Set...(BString*)
// setters take ownership of the pointer, Set...Null() frees and clears it,
// and ...IsNull() tests for the JSON null state.
class DumpExportRepositorySource {
public:
DumpExportRepositorySource();
virtual ~DumpExportRepositorySource();
BString* Url();
void SetUrl(BString* value);
void SetUrlNull();
bool UrlIsNull();
BString* RepoInfoUrl();
void SetRepoInfoUrl(BString* value);
void SetRepoInfoUrlNull();
bool RepoInfoUrlIsNull();
BString* Code();
void SetCode(BString* value);
void SetCodeNull();
bool CodeIsNull();
private:
BString* fUrl;
BString* fRepoInfoUrl;
BString* fCode;
};
#endif // GEN_JSON_SCHEMA_MODEL__DUMPEXPORTREPOSITORYSOURCE_H

View File

@ -0,0 +1,96 @@
{
"$schema": "http://json-schema.org/schema#",
"id": "http://depot.haiku-os.org/schema/pkg/dumpexport.json",
"title": "Pkg",
"javaType": "org.haiku.haikudepotserver.pkg.model.dumpexport.DumpExportPkg",
"type": "object",
"properties": {
"name": {
"type": "string"
},
"modifyTimestamp": {
"type": "integer"
},
"prominenceOrdering": {
"type": "integer"
},
"derivedRating": {
"type": "number"
},
"pkgScreenshots": {
"type": "array",
"items": {
"type": "object",
"javaType": "org.haiku.haikudepotserver.pkg.model.dumpexport.DumpExportPkgScreenshot",
"properties": {
"code": {
"type": "string"
},
"length": {
"type": "integer"
},
"height": {
"type": "integer"
},
"width": {
"type": "integer"
},
"ordering": {
"type": "integer"
}
}
}
},
"pkgCategories": {
"type": "array",
"items": {
"type": "object",
"javaType": "org.haiku.haikudepotserver.pkg.model.dumpexport.DumpExportPkgCategory",
"properties": {
"code": {
"type": "string"
}
}
}
},
"pkgVersions": {
"type": "array",
"items": {
"type": "object",
"javaType": "org.haiku.haikudepotserver.pkg.model.dumpexport.DumpExportPkgVersion",
"properties": {
"major": {
"type": "string"
},
"minor": {
"type": "string"
},
"micro": {
"type": "string"
},
"preRelease": {
"type": "string"
},
"revision": {
"type": "integer"
},
"architectureCode": {
"type": "string"
},
"title": {
"type": "string"
},
"summary": {
"type": "string"
},
"description": {
"type": "string"
},
"payloadLength": {
"type": "integer"
}
}
}
}
}
}

View File

@ -0,0 +1,60 @@
{
"$schema": "http://json-schema.org/schema#",
"id": "http://depot.haiku-os.org/schema/repository/dumpexport.json",
"title": "Repository",
"javaType": "org.haiku.haikudepotserver.repository.model.dumpexport.DumpExportRepository",
"type": "object",
"properties": {
"code": {
"type": "string"
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"informationUrl": {
"type": "string"
},
"repositorySources": {
"type": "array",
"items": {
"type": "object",
"javaType": "org.haiku.haikudepotserver.repository.model.dumpexport.DumpExportRepositorySource",
"properties": {
"code": {
"type": "string"
},
"url": {
"type": "string"
},
"repoInfoUrl": {
"type": "string"
},
"repositorySourceMirrors": {
"type": "array",
"items": {
"type": "object",
"javaType": "org.haiku.haikudepotserver.repository.model.dumpexport.DumpExportRepositorySourceMirror",
"properties": {
"countryCode": {
"type": "string"
},
"baseUrl": {
"type": "string"
},
"description": {
"type": "string"
},
"isPrimary": {
"type": "boolean"
}
}
}
}
}
}
}
}
}