Skip to content

Commit

Permalink
editing config now mostly supported
Browse files Browse the repository at this point in the history
  • Loading branch information
harlo authored and harlo committed Jan 4, 2014
1 parent 425883b commit 38f0015
Show file tree
Hide file tree
Showing 9 changed files with 427 additions and 74 deletions.
19 changes: 18 additions & 1 deletion ISData/database.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import time
import time, json, os
from vars import TIMESTAMP_FORMAT
__metaclass__ = type

Expand All @@ -24,6 +24,23 @@ def indexExists(self, **args):
def createIndex(self, **args):
print "creating a new index on database"

def updateConfig(self, db_name, config):
print "updating sync properties for this database"
from ISUtils.process_utils import getConf

f = open(getConf(os.path.abspath(__file__)), 'rb')
c = json.loads(f.read())
f.close()

for key in config.keys():
c[db_name][key] = config[key]

f = open(getConf(os.path.abspath(__file__)), 'wb+')
f.write(json.dumps(c))
f.close()

return c[db_name]

def parseTimestamp(self, timestamp):
print "parsing timestamp %s according to %d" % (timestamp, self.timestamp_format)
t = time.strptime(timestamp, "%a, %d %b %Y %H:%M:%S %Z")
Expand Down
41 changes: 25 additions & 16 deletions ISData/elasticsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,20 @@ def __init__(self):
f = open(getConf(os.path.abspath(__file__)), 'rb')
els_conf = json.loads(f.read())['ELS']
f.close()

port = els_conf['port']
root_name = els_conf['root_name']
except:
port = 9200
root_name = "minx"
els_conf = {
'port' : 9200,
'root_name' : "minx"
}

port = els_conf['port']
root_name = els_conf['root_name']

try:
self.is_active = els_conf['is_active']
except KeyError as e:
print "is_active not yet set for Elasticsearch. using False as default"
self.is_active = False

self.url = "http://localhost:%d/%s/" % (port, root_name)
self.timestamp_format = TIMESTAMP_FORMAT['milliseconds']
Expand Down Expand Up @@ -56,20 +64,21 @@ def delete(self, river, _id):
return False

def update(self, river, asset, _id):
try:
r = requests.put("%s%s/%s" % (self.url, river, _id), data=json.dumps(asset))
except requests.exceptions.ConnectionError as e:
print e
return False
if self.is_active:
try:
r = requests.put("%s%s/%s" % (self.url, river, _id), data=json.dumps(asset))
except requests.exceptions.ConnectionError as e:
print e
return False

print r.text
result = json.loads(r.text)

try:
return result['ok']
except KeyError as e:
print r.text
result = json.loads(r.text)

try:
return result['ok']
except KeyError as e:
print r.text

return False

def create(self, river, asset, _id=None):
Expand Down
48 changes: 31 additions & 17 deletions ISData/m2xdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,36 +16,50 @@ def __init__(self):

try:
f = open(getConf(os.path.abspath(__file__)), 'rb')
m2x_conf = json.loads(f.read())['M2X']
self.conf = json.loads(f.read())['M2X']
f.close()
except:
self.conf = {}
return

self.url = "http://api-m2x.att.com/v1/feeds/%s" % m2x_conf['feed_id']
try:
self.is_active = self.conf['is_active']
except KeyError as e:
print "is_active not yet set for M2X. using False as default"
self.is_active = False

self.url = "http://api-m2x.att.com/v1/feeds/%s" % self.conf['feed_id']
self.header = {
"X-M2X-KEY" : m2x_conf['api_key'],
"X-M2X-KEY" : self.conf['api_key'],
"Content-type" : "application/json",
"Accept-Encoding" : "gzip, deflate",
"User-Agent" : "python-m2x/%s" % version
}
self.timestamp_format = TIMESTAMP_FORMAT['iso8601']


def updateConfig(self, config):
	"""Persist `config` into the M2X section of the conf file.

	Delegates to the base-class updateConfig and caches the freshly
	written section on the instance.  Always returns True.
	"""
	updated = super(M2XDB, self).updateConfig("M2X", config)
	self.conf = updated
	return True

def update(self, stream, asset):
try:
r = requests.post(
"%s/streams/%s/values" % (self.url, stream),
data=json.dumps({ 'values' : [asset]}),
headers=self.header
)
except requests.exceptions.ConnectionError as e:
print e
return False
if self.is_active:
try:
r = requests.post(
"%s/streams/%s/values" % (self.url, stream),
data=json.dumps({ 'values' : [asset]}),
headers=self.header
)
except requests.exceptions.ConnectionError as e:
print e
return False

print r.headers
print r.status_code
print r.content
print r.headers
print r.status_code
print r.content

return r.status_code in STATUS_OK
return r.status_code in STATUS_OK

return False

def create(self, stream, asset):
return self.update(stream, asset)
Expand Down
2 changes: 1 addition & 1 deletion ISUtils/process_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,6 @@ def stopDaemon(pid_file):
os.kill(pid, signal.SIGTERM)
return True
except OSError as e:
"could not kill process at PID %d" % pid
print "could not kill process at PID %d" % pid

return False
2 changes: 2 additions & 0 deletions ISUtils/scrape_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,12 @@ def determinePattern(str):
return pattern

def buildRegex(tag):
	"""Build a capture regex for the text content of `tag`.

	Returns a (scraper_id, pattern) tuple: scraper_id is the tag's id
	attribute with the "IS_start_" prefix stripped, and pattern matches
	the tag's text (as a capture group) anchored by the escaped markup
	surrounding the tag inside its parent.
	"""
	# Fix: removed leftover debug `print` statements from a pure helper.

	# Capture group for the tag's own text content.
	pattern = "(" + determinePattern(tag.get_text()) + ")"

	# Escape the parent's surrounding markup so it can anchor the group.
	parent = "".join(str(e) for e in tag.parent.contents)
	segments = [sanitizeForRegex(e) for e in parent.split(str(tag))]
	# NOTE(review): assumes the tag's markup occurs in its parent exactly
	# once, so the split yields at least two segments -- IndexError
	# otherwise.  TODO confirm against the scraper templates.
	pattern = ('.*' + segments[0] + pattern + segments[1] + '.*')

	return tag.attrs['id'].replace("IS_start_", ""), pattern
125 changes: 100 additions & 25 deletions api.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,94 @@

log_format = "%(asctime)s %(message)s"

def getConf():
try:
f = open(path_to_conf,'rb')
conf_ = json.loads(f.read())
f.close()
except IOError as e:
print e
conf_ = {}

return conf_

class Res():
	"""Simple response envelope serialized through its instance dict."""

	def __init__(self):
		# Default to failure; handlers flip this to STATUS_OK on success.
		self.result = STATUS_FAIL[0]

	def emit(self):
		"""Return every attribute set on this response as a plain dict."""
		return vars(self)

class EngineHandler(tornado.web.RequestHandler):
def initialize(self, action):
self.action = action

def post(self, action):
res = Res()

if action == "sync":
try:
s = json.loads(self.request.body)
db_name = s['database']
del s['database']
except ValueError as e:
print e
self.finish(res.emit())
return
except KeyError as e:
print e
self.finish(res.emit())
return

db = None
if db_name == "M2X":
from ISData.m2xdb import M2XDB
db = M2XDB()

if db is not None:
db.updateConfig(s)
res.result = STATUS_OK[0]
else:
activate = False
if action == "start":
activate = True

for scraper in getScrapers(scraper_dir):
s = Schema(scraper['url'])
if s.is_active != activate:
s.activate(activate=activate)

res.result = STATUS_OK[0]

self.finish(res.emit())

class ConfigHandler(tornado.web.RequestHandler):
def get(self):
res = Res()
res.result = STATUS_OK[0]

conf_ = getConf()
sync = []
for key in conf_:
s = {
'vars' : [],
'database': key
}

for var in conf_[key].keys():
if var == "is_active":
s['is_active'] = conf_[key][var]
continue

s['vars'].append({
'key' : var,
'value' : conf_[key][var]
})

sync.append(s)

res.data = {
'conf' : conf_,
'sync' : sync,
'scrapers' : getScrapers(scraper_dir)
}

Expand All @@ -34,35 +109,44 @@ def get(self):
def post(self):
res = Res()

print "update config"
try:
s = json.loads(self.request.body)
# load up conf
except ValueError as e:
print e
self.finish(res.emit())
return

id_ = s['id']
del s['id']

print "update config"

f = open(os.path.join(scraper_dir, id_, "conf.json"), 'rb')
schema_conf = json.loads(f.read())
schema = Schema(json.loads(f.read())['url'])
f.close()


should_activate = None
for key in s.keys():
print key
if key == "is_active":
should_activate = s[key]
continue

if type(s[key]) == dict:
val = getattr(schema, key)

for key_ in s[key].keys():
schema_conf[key][key_] = s[key][key_]

val[key_] = s[key][key_]

schema.setattr(key, val)
else:
schema_conf[key] = s[key]
schema.setattr(key, s[key])

f = open(os.path.join(scraper_dir, id_, "conf.json"), 'wb+')
f.write(schema_conf)
f.close()
schema.save()

if should_activate is not None:
print "with activation: %s!" % should_activate
schema.activate(activate=should_activate)

res.result = STATUS_OK[0]
print res.emit()
self.finish(res.emit())
Expand Down Expand Up @@ -125,7 +209,8 @@ def terminationHandler(signal, frame):

routes = [
(r'/', MainHandler),
(r'/config', ConfigHandler)
(r'/config', ConfigHandler),
(r'/engine/(start|stop|sync)', EngineHandler, dict(action=None))
]

api = tornado.web.Application(routes)
Expand All @@ -134,22 +219,12 @@ def terminationHandler(signal, frame):
if __name__ == "__main__":
log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logs")
scraper_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "UserModels")
path_to_conf = os.path.join(os.path.dirname(os.path.abspath(__file__)), "conf.json")

if not os.path.exists(log_dir):
os.makedirs(log_dir)
log_file = os.path.join(log_dir, "api_log.txt")

try:
f = open(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "conf.json"),
'rb'
)
conf_ = json.loads(f.read())
f.close()
except IOError as e:
print e
conf_ = {}

logging.basicConfig(filename=log_file, format=log_format, level=logging.INFO)
logging.info("API Started.")

Expand Down
Loading

0 comments on commit 38f0015

Please sign in to comment.