Merge branch 'each_integration_a_directory' into 'master'

Each integration a directory

Closes Issue #1 

Put each integration in its own subdirectory.
This paves the way for easily ignoring specific integrations later, for example if you never want to touch a production integration.
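
For reference, a sketch of how the new backup paths are assembled, mirroring the save_flow() change in the diff below. The data_dir, integration ID, flow name, and import ID values here are hypothetical placeholders:

    import os
    from slugify import slugify

    # Hypothetical example values (not from a real Celigo account).
    data_dir = "celigo_backup"
    integration_id = "integration123"      # integration['_id']
    flow_name = "Orders to Warehouse"      # flow['name']
    import_id = "import456"                # flow['_importId']

    # Each integration now gets its own directory under data_dir/integrations/,
    # and every import configuration is written beneath it as JSON.
    filedir = os.path.join(data_dir, "integrations", integration_id, "imports")
    filename = os.path.join(filedir, "%s_%s.json" % (slugify(flow_name), import_id))
    # -> celigo_backup/integrations/integration123/imports/orders-to-warehouse_import456.json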

See merge request !1
This commit is contained in:
Tyrel Souza 2016-05-25 17:33:00 +00:00
commit 1dd760f32f
3 changed files with 66 additions and 29 deletions

View File

@@ -2,3 +2,5 @@
 ## [Unreleased]
 ### Added
+- Ability to download all imports/flows from Celigo
+- Storing each flow in a subdirectory of an integration

View File

@@ -1,4 +1,5 @@
 import glob
+import collections
 import logging
 import requests
 import io
@@ -9,6 +10,7 @@ from slugify import slugify
 from .prompt import prompt
+logging.basicConfig(level=logging.INFO)
 L = logging.getLogger(__name__)
 DEFAULT_BASE_URL = "https://api.integrator.io/v1/"
@@ -76,11 +78,10 @@ class BackupCeligo(object):
         self.session = requests.Session()
         self.session.headers.update(self.headers)
-    def ensure_directories_exist(self):
+    def ensure_directories_exist(self, subdirs=None):
         """ Make the directory if it doesn't exist """
-        subdirs = ('imports', 'connections')
         for subdir in subdirs:
+            L.info("Creating subdir: `%s`", subdir)
             _dir = os.path.join(self.data_dir, subdir)
             if not os.path.exists(_dir):
                 os.makedirs(_dir)
@@ -116,13 +117,20 @@ class BackupCeligo(object):
         L.info("Restored backup to %s", self.base_url + path)
         return response
-    def backup(self, auto=False):
-        """
-        Get all the flow data from Celigo.
-        Then loop over each flow and cache its Import data in an instance
-        variable.
-        Once this is cached, save the imports.
-        """
+    def _get_integration_placeholders(self):
+        try:
+            integrations = self._celigo_api_get("integrations/")
+        except requests.exceptions.RequestException:
+            L.info('HTTP Request failed')
+            raise
+        # Set up integrations dictionaries
+        for integration in integrations:
+            self.imports_cache[integration['_id']] = {
+                'name': integration['name'],
+                'slug': slugify(integration['name']),
+                'flows': []}
+    def _get_flow_configurations(self):
         try:
             flows = self._celigo_api_get("flows/")
             for flow in flows:
@@ -131,10 +139,23 @@ class BackupCeligo(object):
             L.info('HTTP Request failed')
             raise
         L.info("Got all imports, writing now")
-        for flow_name, (import_id, import_conf) in self.imports_cache.items():
-            self.save_import(flow_name, auto)
+        L.info("We have imports for: %s", ", ".join(self.imports_cache.keys()))
+    def _save_each_flow(self, auto=False):
+        for integration_id, integration in self.imports_cache.items():
+            for flow in integration['flows']:
+                self.save_flow(integration_id, flow, auto)
+    def backup(self, auto=False):
+        """
+        Get all the flow data from Celigo.
+        Then loop over each flow and cache its Import data in an instance
+        variable.
+        Once this is cached, save the imports.
+        """
+        self._get_integration_placeholders()
+        self._get_flow_configurations()
+        self._save_each_flow(auto)
     def restore(self, auto=False):
         """
@ -174,28 +195,41 @@ class BackupCeligo(object):
""" """
Stores the import in self.imports_cache before write. Stores the import in self.imports_cache before write.
""" """
flow_name = slugify(flow['name']) flow_name = flow['name']
import_id = flow['_importId'] import_id = flow['_importId']
integration_id = flow['_integrationId']
import_conf = self._celigo_api_get( import_conf = self._celigo_api_get(
"imports/{id}/distributed".format( "imports/{id}/distributed".format(
id=import_id)) id=import_id))
self.imports_cache[flow_name] = (import_id, import_conf) self.imports_cache[integration_id]['flows'].append({
"name": flow_name,
"id": import_id,
"configuration": import_conf
})
def save_import(self, flow_name, auto=False): def save_flow(self, integration_id, flow, auto=False):
""" """
Write the import to a .json file with name_id.json format. Write the import to a .json file with name_id.json format.
Prompt for overwrite. Prompt for overwrite.
:param flow_name: the slugified name of the flow as a key :param flow: dictionary of "name", "id", "configuration" for the
flow.
:param auto: if auto is true, don't prompt for overwrite :param auto: if auto is true, don't prompt for overwrite
""" """
import_id, import_conf = self.imports_cache[flow_name] flow_name = flow['name']
flow_id = flow['id']
flow_conf = flow['configuration']
filedir = os.path.join(self.data_dir,
"integrations",
integration_id,
"imports")
self.ensure_directories_exist((filedir,))
filename = os.path.join( filename = os.path.join(
self.data_dir, filedir,
"imports", "%s_%s.json" % (slugify(flow_name), flow_id))
"%s_%s.json" % (flow_name, import_id))
write = True write = True
# By default, we prompt for overwrites # By default, we prompt for overwrites
@@ -209,7 +243,7 @@ class BackupCeligo(object):
             write = bool(overwrite == "Yes")
         if write:
-            self.write_json(filename, import_conf)
+            self.write_json(filename, flow_conf)
         else:
             L.info("You chose not to save this file.")

View File

@@ -30,9 +30,7 @@ class CeligoTest(unittest.TestCase):
         """
         data_dir = "fakedir"
         os.environ['CELIGO_API_KEY'] = ''
-        with self.assertRaisesRegexp(
-                Exception,
-                'Please pass in api_key.*'):
+        with self.assertRaisesRegexp(Exception, 'Please pass in api_key.*'):
             celigo.BackupCeligo(data_dir)
         os.environ['CELIGO_API_KEY'] = self.FAKE_API_KEY
         bc = celigo.BackupCeligo(data_dir)
@@ -48,9 +46,11 @@ class CeligoTest(unittest.TestCase):
         """
         Test that the ensure_directories_exist works properly.
         """
+        fake_id = "fake_integration_id"
         tempdir = tempfile.mkdtemp('celigo_testing')
         bc = celigo.BackupCeligo(tempdir)
-        imports_dir = os.path.join(tempdir, "imports")
+        imports_dir = os.path.join(tempdir, "integrations", fake_id, "imports")
         connections_dir = os.path.join(tempdir, "connections")
         # Check that the directories don't exist already.
         self.assertFalse(
@@ -59,8 +59,9 @@ class CeligoTest(unittest.TestCase):
         self.assertFalse(
             os.path.exists(connections_dir),
             "connections dir exists")
         # Make the directories.
-        bc.ensure_directories_exist()
+        bc.ensure_directories_exist((imports_dir, connections_dir))
         self.assertTrue(
             os.path.exists(imports_dir),
             "Did not create proper directory"
@@ -71,7 +72,7 @@ class CeligoTest(unittest.TestCase):
         )
         # Make sure nothing errors if the directories exist already.
-        bc.ensure_directories_exist()
+        bc.ensure_directories_exist((imports_dir, connections_dir))
     # @requests_mock.Mocker()
     # def test_fake_requests(self, rqm):