Merge branch 'each_integration_a_directory' into 'master'

Each integration a directory

Closes Issue #1 

Put each integration in its own subdirectory.
This paves the way for easily ignoring specific integrations later, in case you never want to touch a production integration.
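
For context, a minimal sketch (not part of this change set) of where a flow's import configuration ends up under the new layout; the data_dir, integration id, flow name, and import id below are placeholders:

import os
from slugify import slugify

data_dir = "backups"                      # hypothetical backup root
integration_id = "fake_integration_id"    # Celigo integration _id
flow_name = "Orders to NetSuite"          # flow['name']
import_id = "fake_import_id"              # flow['_importId']

# Mirrors the path built in save_flow() in the diff below:
#   <data_dir>/integrations/<integration_id>/imports/<slug>_<importId>.json
filedir = os.path.join(data_dir, "integrations", integration_id, "imports")
filename = os.path.join(filedir, "%s_%s.json" % (slugify(flow_name), import_id))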

See merge request !1
Tyrel Souza 2016-05-25 17:33:00 +00:00
commit 1dd760f32f
3 changed files with 66 additions and 29 deletions

View File

@@ -2,3 +2,5 @@
## [Unreleased]
### Added
- Ability to download all imports/flows from Celigo
- Storing each flow in a subdirectory of an integration

View File

@@ -1,4 +1,5 @@
import glob
import collections
import logging
import requests
import io
@@ -9,6 +10,7 @@ from slugify import slugify
from .prompt import prompt
logging.basicConfig(level=logging.INFO)
L = logging.getLogger(__name__)
DEFAULT_BASE_URL = "https://api.integrator.io/v1/"
@@ -76,11 +78,10 @@ class BackupCeligo(object):
self.session = requests.Session()
self.session.headers.update(self.headers)
def ensure_directories_exist(self):
def ensure_directories_exist(self, subdirs=None):
""" Make the directory if it doesn't exist """
subdirs = ('imports', 'connections')
for subdir in subdirs:
L.info("Creating subdir: `%s`", subdir)
_dir = os.path.join(self.data_dir, subdir)
if not os.path.exists(_dir):
os.makedirs(_dir)
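
A quick usage sketch (not part of the diff) for the reworked signature, using placeholder paths; the test changes further down call it the same way:

import os
import celigo    # assuming the module imports as it does in the tests

bc = celigo.BackupCeligo("backups")    # assumes CELIGO_API_KEY is set in the environment
imports_dir = os.path.join("backups", "integrations", "fake_integration_id", "imports")
connections_dir = os.path.join("backups", "connections")
bc.ensure_directories_exist((imports_dir, connections_dir))    # creates whichever dirs are missing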
@@ -116,13 +117,20 @@ class BackupCeligo(object):
L.info("Restored backup to %s", self.base_url + path)
return response
def backup(self, auto=False):
"""
Get all the flow data from Celigo.
Then loop over each flow and cache its import data in an instance
variable.
Once this is cached, save the imports.
"""
def _get_integration_placeholders(self):
try:
integrations = self._celigo_api_get("integrations/")
except requests.exceptions.RequestException:
L.info('HTTP Request failed')
raise
# Set up the per-integration dictionaries
for integration in integrations:
self.imports_cache[integration['_id']] = {
'name': integration['name'],
'slug': slugify(integration['name']),
'flows': []}
def _get_flow_configurations(self):
try:
flows = self._celigo_api_get("flows/")
for flow in flows:
@@ -131,10 +139,23 @@ class BackupCeligo(object):
L.info('HTTP Request failed')
raise
L.info("Got all imports, writing now")
L.info("We have imports for: %s", ", ".join(self.imports_cache.keys()))
for flow_name, (import_id, import_conf) in self.imports_cache.items():
self.save_import(flow_name, auto)
def _save_each_flow(self, auto=False):
for integration_id, integration in self.imports_cache.items():
for flow in integration['flows']:
self.save_flow(integration_id, flow, auto)
def backup(self, auto=False):
"""
Get all the flow data from Celigo.
Then loop over each flow and cache its import data in an instance
variable.
Once this is cached, save the imports.
"""
self._get_integration_placeholders()
self._get_flow_configurations()
self._save_each_flow(auto)
def restore(self, auto=False):
"""
@@ -174,28 +195,41 @@ class BackupCeligo(object):
"""
Stores the import in self.imports_cache before writing.
"""
flow_name = slugify(flow['name'])
flow_name = flow['name']
import_id = flow['_importId']
integration_id = flow['_integrationId']
import_conf = self._celigo_api_get(
"imports/{id}/distributed".format(
id=import_id))
self.imports_cache[flow_name] = (import_id, import_conf)
self.imports_cache[integration_id]['flows'].append({
"name": flow_name,
"id": import_id,
"configuration": import_conf
})
def save_import(self, flow_name, auto=False):
def save_flow(self, integration_id, flow, auto=False):
"""
Write the import to a .json file with name_id.json format.
Prompt for overwrite.
:param flow_name: the slugified name of the flow as a key
:param flow: dictionary of "name", "id", "configuration" for the
flow.
:param auto: if auto is true, don't prompt for overwrite
"""
import_id, import_conf = self.imports_cache[flow_name]
flow_name = flow['name']
flow_id = flow['id']
flow_conf = flow['configuration']
filedir = os.path.join(self.data_dir,
"integrations",
integration_id,
"imports")
self.ensure_directories_exist((filedir,))
filename = os.path.join(
self.data_dir,
"imports",
"%s_%s.json" % (flow_name, import_id))
filedir,
"%s_%s.json" % (slugify(flow_name), flow_id))
write = True
# By default, we prompt for overwrites
@@ -209,7 +243,7 @@ class BackupCeligo(object):
write = bool(overwrite == "Yes")
if write:
self.write_json(filename, import_conf)
self.write_json(filename, flow_conf)
else:
L.info("You chose not to save this file.")

View File

@@ -30,9 +30,7 @@ class CeligoTest(unittest.TestCase):
"""
data_dir = "fakedir"
os.environ['CELIGO_API_KEY'] = ''
with self.assertRaisesRegexp(
Exception,
'Please pass in api_key.*'):
with self.assertRaisesRegexp(Exception, 'Please pass in api_key.*'):
celigo.BackupCeligo(data_dir)
os.environ['CELIGO_API_KEY'] = self.FAKE_API_KEY
bc = celigo.BackupCeligo(data_dir)
@@ -48,9 +46,11 @@ class CeligoTest(unittest.TestCase):
"""
Test that the ensure_directories_exist works properly.
"""
fake_id = "fake_integration_id"
tempdir = tempfile.mkdtemp('celigo_testing')
bc = celigo.BackupCeligo(tempdir)
imports_dir = os.path.join(tempdir, "imports")
imports_dir = os.path.join(tempdir, "integrations", fake_id, "imports")
connections_dir = os.path.join(tempdir, "connections")
# Check that the directories don't exist already.
self.assertFalse(
@@ -59,8 +59,9 @@ class CeligoTest(unittest.TestCase):
self.assertFalse(
os.path.exists(connections_dir),
"connections dir exists")
# Make the directories.
bc.ensure_directories_exist()
bc.ensure_directories_exist((imports_dir, connections_dir))
self.assertTrue(
os.path.exists(imports_dir),
"Did not create proper directory"
@@ -71,7 +72,7 @@ class CeligoTest(unittest.TestCase):
)
# Make sure nothing errors if the directories exist already.
bc.ensure_directories_exist()
bc.ensure_directories_exist((imports_dir, connections_dir))
# @requests_mock.Mocker()
# def test_fake_requests(self, rqm):
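
A possible follow-up assertion (sketch only, not in this commit) that the per-integration layout lands on disk; the path mirrors save_flow() and the values are placeholders:

import os
import tempfile

tempdir = tempfile.mkdtemp('celigo_testing')
expected = os.path.join(
    tempdir, "integrations", "fake_integration_id", "imports",
    "orders-to-netsuite_fake_import_id.json")    # <slug>_<importId>.json
# After a mocked backup() run, a test could assert:
#     self.assertTrue(os.path.exists(expected))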