[copr] master: [backend] pass results_baseurl to createrepo (ba5f493)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit ba5f493afaff4b6c14bfb65773499e24788cfaec
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Tue Nov 4 11:29:50 2014 +0100
[backend] pass results_baseurl to createrepo
>---------------------------------------------------------------
backend/backend/actions.py | 44 +++++++++++++++-----------
backend/backend/createrepo.py | 10 +++---
backend/backend/dispatcher.py | 1 +
backend/backend/mockremote.py | 52 ++++++++++++++++++++-----------
backend/copr-be.py | 3 +-
backend/tests/test_action.py | 68 +++++++++++++++++++++++++++++-----------
6 files changed, 116 insertions(+), 62 deletions(-)
diff --git a/backend/backend/actions.py b/backend/backend/actions.py
index a6d80c1..8e8fd04 100644
--- a/backend/backend/actions.py
+++ b/backend/backend/actions.py
@@ -24,7 +24,9 @@ class Action(object):
"""
- def __init__(self, events, action, lock, frontend_callback, destdir, front_url):
+ def __init__(self, events, action, lock,
+ frontend_callback, destdir,
+ front_url, results_root_url):
super(Action, self).__init__()
self.frontend_callback = frontend_callback
self.destdir = destdir
@@ -32,6 +34,7 @@ class Action(object):
self.events = events
self.lock = lock
self.front_url = front_url
+ self.results_root_url = results_root_url
def add_event(self, what):
self.events.put({"when": time.time(), "who": "action", "what": what})
@@ -91,11 +94,13 @@ class Action(object):
def handle_delete_build(self):
self.add_event("Action delete build")
project = self.data["old_value"]
+
ext_data = json.loads(self.data["data"])
+ username = ext_data["username"]
+ projectname = ext_data["projectname"]
- packages = [os.path.basename(x).replace(".src.rpm", "") for x in \
- ext_data["pkgs"].split()]
- # self.data["data"].split()]
+ packages = [os.path.basename(x).replace(".src.rpm", "")
+ for x in ext_data["pkgs"].split()]
path = os.path.join(self.destdir, project)
@@ -118,10 +123,10 @@ class Action(object):
# than I delete the failed, it would delete the succeeded
# files as well - that would be wrong.
for pkg in packages:
- if self.data["object_type"] == "build-succeeded" or \
- (self.data["object_type"] == "build-failed" and
- os.path.exists(os.path.join(path, chroot, pkg, "fail"))
- ):
+ if self.data["object_type"] == "build-succeeded" or (
+ self.data["object_type"] == "build-failed" and
+ os.path.exists(os.path.join(path, chroot, pkg, "fail"))):
+
pkg_path = os.path.join(path, chroot, pkg)
if os.path.isdir(pkg_path):
self.add_event("Removing build {0}".format(pkg_path))
@@ -132,16 +137,19 @@ class Action(object):
"Package {0} dir not found in chroot {1}"
.format(pkg, chroot))
- if altered:
- self.add_event("Running createrepo")
- _, _, err = createrepo(
- path=os.path.join(path, chroot), lock=self.lock,
- front_url=self.front_url,
- username=ext_data["username"], projectname=ext_data["projectname"]
- )
- if err.strip():
- self.add_event(
- "Error making local repo: {0}".format(err))
+ if altered:
+ self.add_event("Running createrepo")
+
+ result_base_url = "/".join([self.results_root_url, username,
+ projectname, chroot])
+ _, _, err = createrepo(
+ path=os.path.join(path, chroot), lock=self.lock,
+ front_url=self.front_url, base_url=result_base_url,
+ username=username, projectname=projectname
+ )
+ if err.strip():
+ self.add_event(
+ "Error making local repo: {0}".format(err))
log_path = os.path.join(
path, chroot,
diff --git a/backend/backend/createrepo.py b/backend/backend/createrepo.py
index 3dedb3d..d28e0f9 100644
--- a/backend/backend/createrepo.py
+++ b/backend/backend/createrepo.py
@@ -69,11 +69,11 @@ def createrepo(path, front_url, username, projectname, base_url=None, lock=None)
Creates repo depending on the project setting "auto_createrepo".
When enabled creates `repodata` at the provided path, otherwise
- :param str path: directory with rpms
- :param str front_url: url to the copr frontend
- :param str username: copr project owner username
- :param str projectname: copr project name
- :param str base_url: base_url to access rpms independently of repomd location
+ :param path: directory with rpms
+ :param front_url: url to the copr frontend
+ :param username: copr project owner username
+ :param projectname: copr project name
+ :param base_url: base_url to access rpms independently of repomd location
:param Multiprocessing.Lock lock: [optional] global copr-backend lock
:return: tuple(returncode, stdout, stderr) produced by `createrepo_c`
diff --git a/backend/backend/dispatcher.py b/backend/backend/dispatcher.py
index f305d0c..3ddc781 100644
--- a/backend/backend/dispatcher.py
+++ b/backend/backend/dispatcher.py
@@ -574,6 +574,7 @@ class Worker(multiprocessing.Process):
callback=CliLogCallBack(
quiet=True, logfn=chrootlogfile),
front_url=self.opts.frontend_base_url,
+ results_base_url=self.opts.results_baseurl
)
build_details = mr.build_pkgs(job.pkgs)
diff --git a/backend/backend/mockremote.py b/backend/backend/mockremote.py
index 919fbd9..ff97db2 100755
--- a/backend/backend/mockremote.py
+++ b/backend/backend/mockremote.py
@@ -539,7 +539,8 @@ class MockRemote(object):
def __init__(self, builder=None, user=DEF_USER, job=None,
cont=False, recurse=False, repos=None, callback=None,
remote_basedir=DEF_REMOTE_BASEDIR, remote_tempdir=None,
- macros=None, lock=None, do_sign=False, front_url=None):
+ macros=None, lock=None, do_sign=False,
+ front_url=None, results_base_url=None):
"""
@@ -592,6 +593,7 @@ class MockRemote(object):
self.lock = lock
self.do_sign = do_sign
self.front_url = front_url
+ self.results_base_url = results_base_url or u''
if not self.callback:
self.callback = DefaultCallBack()
@@ -685,6 +687,30 @@ class MockRemote(object):
fcntl.flock(r_log, fcntl.LOCK_UN)
r_log.close()
+ def do_createrepo(self, chroot_dir):
+ base_url = "/".join([self.results_base_url, self.job.project_owner,
+ self.job.project_name, self.job.chroot])
+ self.callback.log("Createrepo:: owner: {}; project: {}; front url: {}; path: {}; base_url: {}".format(
+ self.job.project_owner, self.job.project_name, self.front_url, chroot_dir, base_url
+ ))
+
+
+ _, _, err = createrepo(
+ path=chroot_dir,
+ front_url=self.front_url,
+ base_url=base_url,
+ username=self.job.project_owner,
+ projectname=self.job.project_name,
+ lock=self.lock,
+ )
+ if err.strip():
+ self.callback.error(
+ "Error making local repo: {0}".format(chroot_dir))
+
+ self.callback.error(str(err))
+ # FIXME - maybe clean up .repodata and .olddata
+ # here?
+
def build_pkgs(self, pkgs=None):
if not pkgs:
@@ -780,23 +806,7 @@ class MockRemote(object):
built_pkgs.append(pkg)
# createrepo with the new pkgs
- self.callback.log("Createrepo:: owner: {}; project: {}; front url: {}; path: {}".format(
- self.job.project_owner, self.job.project_name, self.front_url, chroot_dir
- ))
- _, _, err = createrepo(
- path=chroot_dir,
- front_url=self.front_url,
- username=self.job.project_owner,
- projectname=self.job.project_name,
- lock=self.lock,
- )
- if err.strip():
- self.callback.error(
- "Error making local repo: {0}".format(chroot_dir))
-
- self.callback.error(str(err))
- # FIXME - maybe clean up .repodata and .olddata
- # here?
+ self.do_createrepo(chroot_dir, )
if self.failed:
if len(self.failed) != len(to_be_built):
@@ -858,7 +868,10 @@ def parse_args(args):
action="store_true",
help="output very little to the terminal")
parser.add_option("-f", "--front_url", dest="front_url",
- help="copr frontend url")
+ help="copr frontend url")
+ parser.add_option("--results_url", dest="results_base_url",
+ help="backend base url for built packages")
+
opts, args = parser.parse_args(args)
@@ -930,6 +943,7 @@ def main(args):
do_sign=opts.do_sign,
callback=callback,
front_url=opts.front_url,
+ results_base_url=opts.results_base_url,
)
# FIXMES
diff --git a/backend/copr-be.py b/backend/copr-be.py
index 0869059..380a081 100755
--- a/backend/copr-be.py
+++ b/backend/copr-be.py
@@ -116,7 +116,8 @@ class CoprJobGrab(multiprocessing.Process):
for action in r_json["actions"]:
ao = Action(self.events, action, self.lock, destdir=self.opts.destdir,
frontend_callback=FrontendCallback(self.opts, self.events),
- front_url=self.opts.frontend_base_url)
+ front_url=self.opts.frontend_base_url,
+ results_root_url=self.opts.results_baseurl)
ao.run()
def run(self):
diff --git a/backend/tests/test_action.py b/backend/tests/test_action.py
index c5edb7a..7040530 100644
--- a/backend/tests/test_action.py
+++ b/backend/tests/test_action.py
@@ -20,14 +20,14 @@ else:
import logging
-logging.basicConfig(
- level=logging.INFO,
- format='[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s',
- datefmt='%H:%M:%S'
-)
-
-log = logging.getLogger()
-log.info("Logger initiated")
+# logging.basicConfig(
+# level=logging.INFO,
+# format='[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s',
+# datefmt='%H:%M:%S'
+# )
+#
+# log = logging.getLogger()
+# log.info("Logger initiated")
from backend.actions import Action, ActionType, ActionResult
@@ -39,6 +39,10 @@ else:
import Queue as queue
from Queue import Empty as EmptyQueue
+
+RESULTS_ROOT_URL = "http://example.com/results"
+
+
@mock.patch("backend.actions.time")
class TestAction(object):
@@ -80,8 +84,15 @@ class TestAction(object):
return self.tmp_dir_name
def test_action_event(self, mc_time):
- test_action = Action(events=self.test_q, action={}, lock=None,
- frontend_callback=None, destdir=None, front_url=None)
+ test_action = Action(
+ events=self.test_q,
+ action={}, lock=None,
+ frontend_callback=None,
+ destdir=None,
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
+ )
+
with pytest.raises(EmptyQueue):
test_action.events.get_nowait()
@@ -105,7 +116,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=None,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
assert not mc_front_cb.called
@@ -136,7 +148,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=tmp_dir,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
result_dict = mc_front_cb.update.call_args[0][0]["actions"][0]
@@ -167,7 +180,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=os.path.join(tmp_dir, "dir-not-exists"),
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
result_dict = mc_front_cb.update.call_args[0][0]["actions"][0]
@@ -197,7 +211,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=tmp_dir,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
result_dict = mc_front_cb.update.call_args[0][0]["actions"][0]
@@ -227,7 +242,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=os.path.join(tmp_dir, "dir-not-exists"),
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
@@ -262,7 +278,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=tmp_dir,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
@@ -303,7 +320,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=tmp_dir,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
with mock.patch("backend.actions.shutil") as mc_shutil:
test_action.run()
@@ -362,6 +380,7 @@ class TestAction(object):
frontend_callback=mc_front_cb,
destdir=tmp_dir,
front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
assert os.path.exists(foo_pkg_dir)
@@ -384,7 +403,7 @@ class TestAction(object):
def assert_fedora20():
assert_what_from_queue(self.test_q, msg_list=[
"Removing build ",
- "Running createrepo",
+ #"Running createrepo",
"Package bar dir not found in chroot fedora20",
"Running createrepo",
])
@@ -407,6 +426,16 @@ class TestAction(object):
with pytest.raises(EmptyQueue):
self.test_q.get_nowait()
+ create_repo_expected_call = mock.call(
+ username=u'foo',
+ projectname=u'bar',
+ base_url=u'http://example.com/results/foo/bar/fedora20',
+ lock=None,
+ path='{}/old_dir/fedora20'.format(self.tmp_dir_name),
+ front_url=None
+ )
+ assert mc_createrepo.call_args == create_repo_expected_call
+
@mock.patch("backend.actions.createrepo")
def test_delete_build_succeeded_createrepo_error(self, mc_createrepo, mc_time):
mc_time.time.return_value = self.test_time
@@ -439,7 +468,8 @@ class TestAction(object):
events=self.test_q, lock=None,
frontend_callback=mc_front_cb,
destdir=tmp_dir,
- front_url=None
+ front_url=None,
+ results_root_url=RESULTS_ROOT_URL
)
test_action.run()
9 years, 6 months
[copr] master: [backend] createrepo.py added option to pass base_url (f68d1a7)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit f68d1a71036d1bad43cb8843dd58b9bfd448bdb3
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Tue Nov 4 10:08:43 2014 +0100
[backend] createrepo.py added option to pass base_url
>---------------------------------------------------------------
backend/backend/createrepo.py | 8 +++++---
backend/tests/test_createrepo.py | 25 +++++++++++++------------
2 files changed, 18 insertions(+), 15 deletions(-)
diff --git a/backend/backend/createrepo.py b/backend/backend/createrepo.py
index 6673be4..3dedb3d 100644
--- a/backend/backend/createrepo.py
+++ b/backend/backend/createrepo.py
@@ -64,7 +64,7 @@ def get_auto_createrepo_status(front_url, username, projectname):
return True
-def createrepo(path, front_url, username, projectname, lock=None):
+def createrepo(path, front_url, username, projectname, base_url=None, lock=None):
"""
Creates repo depending on the project setting "auto_createrepo".
When enabled creates `repodata` at the provided path, otherwise
@@ -73,15 +73,17 @@ def createrepo(path, front_url, username, projectname, lock=None):
:param str front_url: url to the copr frontend
:param str username: copr project owner username
:param str projectname: copr project name
+ :param str base_url: base_url to access rpms independently of repomd location
:param Multiprocessing.Lock lock: [optional] global copr-backend lock
-
:return: tuple(returncode, stdout, stderr) produced by `createrepo_c`
"""
# TODO: add means of logging
+ base_url = base_url or ""
+
if get_auto_createrepo_status(front_url, username, projectname):
return createrepo_unsafe(path, lock)
else:
- return createrepo_unsafe(path, lock, base_url="../", dest_dir="devel")
+ return createrepo_unsafe(path, lock, base_url=base_url, dest_dir="devel")
diff --git a/backend/tests/test_createrepo.py b/backend/tests/test_createrepo.py
index 088c5c8..40ec041 100644
--- a/backend/tests/test_createrepo.py
+++ b/backend/tests/test_createrepo.py
@@ -31,11 +31,10 @@ def test_createrepo_conditional_true(mc_client, mc_create_unsafe):
createrepo(path="/tmp/", front_url="http://example.com/api",
username="foo", projectname="bar", lock=None)
-
mc_create_unsafe.reset_mock()
-
- mc_client.return_value.get_project_details.return_value = MagicMock(data={"detail": {"auto_createrepo": True}})
+ mc_client.return_value.get_project_details.return_value = MagicMock(
+ data={"detail": {"auto_createrepo": True}})
createrepo(path="/tmp/", front_url="http://example.com/api",
username="foo", projectname="bar", lock=None)
@@ -47,10 +46,11 @@ def test_createrepo_conditional_true(mc_client, mc_create_unsafe):
def test_createrepo_conditional_false(mc_client, mc_create_unsafe):
mc_client.return_value.get_project_details.return_value = MagicMock(data={"detail": {"auto_createrepo": False}})
+ base_url = "http://example.com/repo/"
createrepo(path="/tmp/", front_url="http://example.com/api",
- username="foo", projectname="bar", lock=None)
+ username="foo", projectname="bar", base_url=base_url, lock=None)
- assert mc_create_unsafe.call_args == mock.call('/tmp/', None, dest_dir='devel', base_url='../')
+ assert mc_create_unsafe.call_args == mock.call('/tmp/', None, dest_dir='devel', base_url=base_url)
@mock.patch('backend.createrepo.Popen')
@@ -58,6 +58,7 @@ class TestCreaterepoUnsafe(object):
def setup_method(self, method):
self.tmp_dir_name = self.make_temp_dir()
self.test_time = time.time()
+ self.base_url = "http://example.com/repo/"
def teardown_method(self, method):
self.rm_tmp_dir()
@@ -129,11 +130,11 @@ class TestCreaterepoUnsafe(object):
expected_epel_5 = ['/usr/bin/createrepo_c', '--database', '--ignore-lock',
'-s', 'sha', '--checksum', 'md5',
'--outputdir', os.path.join(path_epel_5, "devel"),
- '--baseurl', '../', path_epel_5]
+ '--baseurl', self.base_url, path_epel_5]
path_fedora = os.path.join(self.tmp_dir_name, "fedora-21")
expected_fedora = ['/usr/bin/createrepo_c', '--database', '--ignore-lock',
'--outputdir', os.path.join(path_fedora, "devel"),
- '--baseurl', '../', path_fedora]
+ '--baseurl', self.base_url, path_fedora]
for path, expected in [(path_epel_5, expected_epel_5), (path_fedora, expected_fedora)]:
os.makedirs(path)
@@ -142,7 +143,7 @@ class TestCreaterepoUnsafe(object):
with open(os.path.join(repo_path, "repomd.xml"), "w") as handle:
handle.write("1")
- createrepo_unsafe(path, lock=None, base_url="../", dest_dir="devel")
+ createrepo_unsafe(path, lock=None, base_url=self.base_url, dest_dir="devel")
assert mc_popen.call_args == mock.call(expected, stderr=-1, stdout=-1)
def test_createrepo_devel_generated_commands(self, mc_popen):
@@ -152,15 +153,15 @@ class TestCreaterepoUnsafe(object):
expected_epel_5 = ['/usr/bin/createrepo_c', '--database', '--ignore-lock',
'-s', 'sha', '--checksum', 'md5',
'--outputdir', os.path.join(path_epel_5, "devel"),
- '--baseurl', '../', path_epel_5]
+ '--baseurl', self.base_url, path_epel_5]
path_fedora = os.path.join(self.tmp_dir_name, "fedora-21")
expected_fedora = ['/usr/bin/createrepo_c', '--database', '--ignore-lock',
'--outputdir', os.path.join(path_fedora, "devel"),
- '--baseurl', '../', path_fedora]
+ '--baseurl', self.base_url, path_fedora]
for path, expected in [(path_epel_5, expected_epel_5), (path_fedora, expected_fedora)]:
os.makedirs(path)
- createrepo_unsafe(path, lock=None, base_url="../", dest_dir="devel")
+ createrepo_unsafe(path, lock=None, base_url=self.base_url, dest_dir="devel")
assert os.path.exists(os.path.join(path, "devel"))
assert mc_popen.call_args == mock.call(expected, stderr=-1, stdout=-1)
#
@@ -173,5 +174,5 @@ class TestCreaterepoUnsafe(object):
# for path in [path_epel_5, path_fedora]:
# os.makedirs(path)
#
- # createrepo_unsafe(path, lock=None, base_url="../", dest_dir="devel")
+ # createrepo_unsafe(path, lock=None, base_url=self.base_url, dest_dir="devel")
# assert os.path.exists(os.path.join(path, "devel"))
9 years, 6 months
[copr] master: [ci] run pep8 on keygen also (fe0bcf1)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit fe0bcf1a86282c55c9c72c4576a5aa4900392152
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 18:40:02 2014 +0100
[ci] run pep8 on keygen also
>---------------------------------------------------------------
test_suite.sh | 1 +
1 files changed, 1 insertions(+), 0 deletions(-)
diff --git a/test_suite.sh b/test_suite.sh
index e212b1e..ab7468b 100644
--- a/test_suite.sh
+++ b/test_suite.sh
@@ -43,6 +43,7 @@ pep8 --max-line-length=120 python/copr > _report/python-copr_pep8.txt || echo '
pep8 --max-line-length=120 backend/backend backend/copr-be.py > _report/backend_pep8.txt || echo 'pep8 did not finish with return code 0'
pep8 --max-line-length=120 frontend/coprs_frontend > _report/frontend_pep8.txt || echo 'pep8 did not finish with return code 0'
pep8 --max-line-length=120 cli/copr_cli > _report/copr-cli_pep8.txt || echo 'pep8 did not finish with return code 0'
+pep8 --max-line-length=120 keygen/src > _report/keygen_pep8.txt || echo 'pep8 did not finish with return code 0'
deactivate
rm -rf _tmp/*
9 years, 6 months
[copr] master: [ci] added pep8 reports for jenkins (93920f9)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit 93920f90c8bc3d51f8315097612131dbee930723
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 18:37:18 2014 +0100
[ci] added pep8 reports for jenkins
>---------------------------------------------------------------
test_suite.sh | 6 +++++-
1 files changed, 5 insertions(+), 1 deletions(-)
diff --git a/test_suite.sh b/test_suite.sh
index 7daaf59..e212b1e 100644
--- a/test_suite.sh
+++ b/test_suite.sh
@@ -39,6 +39,10 @@ mv {,_report/cli.}coverage.xml
PYTHONPATH=keygen/src:$PYTHONPATH python -B -m pytest keygen/tests --junitxml=_report/keygen.junit.xml --cov-report xml --cov keygen/src $@
mv {,_report/keygen.}coverage.xml
+pep8 --max-line-length=120 python/copr > _report/python-copr_pep8.txt || echo 'pep8 did not finish with return code 0'
+pep8 --max-line-length=120 backend/backend backend/copr-be.py > _report/backend_pep8.txt || echo 'pep8 did not finish with return code 0'
+pep8 --max-line-length=120 frontend/coprs_frontend > _report/frontend_pep8.txt || echo 'pep8 did not finish with return code 0'
+pep8 --max-line-length=120 cli/copr_cli > _report/copr-cli_pep8.txt || echo 'pep8 did not finish with return code 0'
+
deactivate
rm -rf _tmp/*
-
9 years, 6 months
[copr] master: Automatic commit of package [python-copr] release [1.53-1]. (d246d1a)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit d246d1ac378ad2a4daa923ab26faa6f0c1c2e53c
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 18:04:26 2014 +0100
Automatic commit of package [python-copr] release [1.53-1].
>---------------------------------------------------------------
python/python-copr.spec | 5 ++++-
rel-eng/packages/python-copr | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/python/python-copr.spec b/python/python-copr.spec
index a6fa0c8..e6d5240 100644
--- a/python/python-copr.spec
+++ b/python/python-copr.spec
@@ -10,7 +10,7 @@
%endif
Name: python-copr
-Version: 1.52
+Version: 1.53
Release: 1%{?dist}
Summary: Python interface for Copr
@@ -165,6 +165,9 @@ popd
%endif
%changelog
+* Mon Nov 03 2014 Valentin Gologuzov <vgologuz(a)redhat.com> 1.53-1
+- [python-copr] syntax bugfix
+
* Mon Nov 03 2014 Valentin Gologuzov <vgologuz(a)redhat.com> 1.52-1
- [python-copr] removed log config from client
diff --git a/rel-eng/packages/python-copr b/rel-eng/packages/python-copr
index bb37ba2..2b66df9 100644
--- a/rel-eng/packages/python-copr
+++ b/rel-eng/packages/python-copr
@@ -1 +1 @@
-1.52-1 python/
+1.53-1 python/
9 years, 6 months
[copr] master: [python-copr] refix (ceb9825)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit ceb982595a417fd2c52a0a7ff699ff1aa0fb4d50
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 17:59:01 2014 +0100
[python-copr] refix
>---------------------------------------------------------------
python/copr/client/client.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diff --git a/python/copr/client/client.py b/python/copr/client/client.py
index 3a5d7d4..ad59f76 100644
--- a/python/copr/client/client.py
+++ b/python/copr/client/client.py
@@ -17,7 +17,7 @@ from six.moves import configparser
log = logging.getLogger(__name__)
-log.addHandler(logging.NullHandler)
+log.addHandler(logging.NullHandler())
from .exceptions import CoprConfigException, CoprNoConfException, \
CoprRequestException, \
9 years, 6 months
[copr] master: [backend] refactoring: explicit BuildJob class (e6027dc)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit e6027dcaa852b7f15238cfebbc712b0e1b28902e
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 17:43:40 2014 +0100
[backend] refactoring: explicit BuildJob class
>---------------------------------------------------------------
backend/backend/callback.py | 19 ++--
backend/backend/dispatcher.py | 111 ++++++--------------
backend/backend/job.py | 88 ++++++++++++++++
backend/backend/mockremote.py | 84 +++++++++-------
.../{copr-be.conf.example => copr-be.local.conf} | 9 +-
backend/copr-be.py | 18 ++-
.../coprs/views/backend_ns/backend_general.py | 3 +-
7 files changed, 196 insertions(+), 136 deletions(-)
diff --git a/backend/backend/callback.py b/backend/backend/callback.py
index 0172ebc..f76f5fd 100644
--- a/backend/backend/callback.py
+++ b/backend/backend/callback.py
@@ -8,37 +8,36 @@ class FrontendCallback(object):
Object to send data back to fronted
"""
- def __init__(self, opts, log=None):
+ def __init__(self, opts, events):
super(FrontendCallback, self).__init__()
self.frontend_url = opts.frontend_url
self.frontend_auth = opts.frontend_auth
- self.log = log
- self.msg = None
+ self.msg = None
def _post_to_frontend(self, data, url_path):
"""
Make a request to the frontend
"""
+
headers = {"content-type": "application/json"}
url = "{0}/{1}/".format(self.frontend_url, url_path)
auth = ("user", self.frontend_auth)
self.msg = None
- response = None
+
try:
response = requests.post(url, data=json.dumps(data), auth=auth,
- headers=headers)
+ headers=headers)
if response.status_code != 200:
self.msg = "Failed to submit to frontend: {0}: {1}".format(
response.status_code, response.text)
raise requests.RequestException(self.msg)
- except requests.RequestException, e:
+ except requests.RequestException as e:
self.msg = "Post request failed: {0}".format(e)
raise
return response
-
def _post_to_frontend_repeatedly(self, data, url_path, max_repeats=10):
"""
Make a request max_repeats-time to the frontend
@@ -49,22 +48,19 @@ class FrontendCallback(object):
response = self._post_to_frontend(data, url_path)
break
except requests.RequestException:
- if self.log:
- self.log(self.msg)
+
if repeats == max_repeats:
raise
repeats += 1
time.sleep(5)
return response
-
def update(self, data):
"""
Send data to be updated in the frontend
"""
self._post_to_frontend_repeatedly(data, "update")
-
def starting_build(self, build_id, chroot_name):
"""
Announce to the frontend that a build is starting.
@@ -76,4 +72,3 @@ class FrontendCallback(object):
if "can_start" not in response.json():
raise requests.RequestException("Bad respond from the frontend")
return response.json()["can_start"]
-
diff --git a/backend/backend/dispatcher.py b/backend/backend/dispatcher.py
index 080a9b8..f305d0c 100644
--- a/backend/backend/dispatcher.py
+++ b/backend/backend/dispatcher.py
@@ -11,14 +11,16 @@ import ansible
import ansible.runner
import ansible.utils
from ansible import callbacks
-from bunch import Bunch
+
from setproctitle import setproctitle
from IPy import IP
from retask.queue import Queue
+
from .exceptions import MockRemoteError, CoprWorkerError
+from .job import BuildJob
-import mockremote
-from callback import FrontendCallback
+from .mockremote import MockRemote, CliLogCallBack
+from .callback import FrontendCallback
ansible_playbook = "ansible-playbook"
@@ -142,7 +144,7 @@ class Worker(multiprocessing.Process):
self.callback = callback
self.create = create
self.lock = lock
- self.frontend_callback = FrontendCallback(opts)
+ self.frontend_callback = FrontendCallback(opts, events)
if not self.callback:
self.logfile = os.path.join(
self.opts.worker_logdir,
@@ -159,7 +161,7 @@ class Worker(multiprocessing.Process):
def event(self, topic, template, content=None):
""" Multi-purpose logging method.
- Logs messages to two different destinations:
+ Logs messages to three different destinations:
- To log file
- The internal "events" queue for communicating back to the
dispatcher.
@@ -188,7 +190,6 @@ class Worker(multiprocessing.Process):
# XXX - Maybe log traceback as well with traceback.format_exc()
self.callback.log("failed to publish message: {0}".format(e))
-
def _announce_start(self, job, ip="none"):
"""
Announce everywhere that a build process started now.
@@ -214,7 +215,6 @@ class Worker(multiprocessing.Process):
self.event("chroot.start", template, content)
-
def _announce_end(self, job, ip="none"):
"""
Announce everywhere that a build process ended now.
@@ -232,7 +232,6 @@ class Worker(multiprocessing.Process):
status=job.status, chroot=job.chroot)
self.event("build.end", template, content)
-
def run_ansible_playbook(self, args, name="running playbook", attempts=9):
"""
call ansible playbook
@@ -345,7 +344,6 @@ class Worker(multiprocessing.Process):
"the testing playbook. Spawning another one.")
self.terminate_instance(ipaddr)
-
def terminate_instance(self, instance_ip):
"""call the terminate playbook to destroy the building instance"""
@@ -362,47 +360,16 @@ class Worker(multiprocessing.Process):
ans_extra_vars_encode(term_args, "copr_task"))
self.run_ansible_playbook(args, "terminate instance")
-
- def create_job(self, task):
- """
- Create a Bunch from the task dict and add some stuff
- """
- job = Bunch()
- job.update(task)
-
- job.pkgs = [task["pkgs"]] # just for now
-
- job.repos = [r for r in task["repos"].split(" ") if r.strip()]
-
- if not task["timeout"]:
- job.timeout = self.opts.timeout
-
- job.destdir = os.path.normpath(
- os.path.join(self.opts.destdir,
- task["project_owner"],
- task["project_name"]))
-
- job.results = os.path.join(
- self.opts.results_baseurl,
- task["project_owner"],
- task["project_name"] + "/")
-
- job.pkg_version = ""
- job.built_packages = ""
-
- return job
-
-
def mark_started(self, job):
"""
Send data about started build to the frontend
"""
- build = {"id": job.build_id,
- "started_on": job.started_on,
- "results": job.results,
- "chroot": job.chroot,
- "status": 3, # running
- }
+
+ job.status = 3 # running
+ build = job.to_dict()
+
+ self.callback.log("build: {}".format(build))
+ #build["status"] = 3 # running
data = {"builds": [build]}
try:
@@ -411,7 +378,6 @@ class Worker(multiprocessing.Process):
raise CoprWorkerError(
"Could not communicate to front end to submit status info")
-
def return_results(self, job):
"""
Send the build results to the frontend
@@ -420,23 +386,16 @@ class Worker(multiprocessing.Process):
"{0} status {1}. Took {2} seconds".format(
job.build_id, job.status, job.ended_on - job.started_on))
- build = {
- "id": job.build_id,
- "ended_on": job.ended_on,
- "status": job.status,
- "chroot": job.chroot,
- "pkg_version": job.pkg_version,
- "built_packages": job.built_packages,
- }
-
- data = {"builds": [build]}
+ self.callback.log("build: {}".format(job.to_dict()))
+ data = {"builds": [job.to_dict()]}
try:
self.frontend_callback.update(data)
- except:
+ except Exception as err:
raise CoprWorkerError(
- "Could not communicate to front end to submit results")
-
+ "Could not communicate to front end to submit results: {}"
+ .format(err)
+ )
def starting_build(self, job):
"""
@@ -444,18 +403,17 @@ class Worker(multiprocessing.Process):
Return: True if the build can start
False if the build can not start (build is cancelled)
"""
- response = None
+
try:
- response = self.frontend_callback.starting_build(
- job.build_id,
- job.chroot)
- except:
+ response = self.frontend_callback.starting_build(job.build_id, job.chroot)
+ except Exception as err:
raise CoprWorkerError(
- "Could not communicate to front end to submit results")
+ "Could not communicate to front end to submit results: {}"
+ .format(err)
+ )
return response
-
@classmethod
def pkg_built_before(cls, pkgs, chroot, destdir):
"""
@@ -496,12 +454,13 @@ class Worker(multiprocessing.Process):
time.sleep(self.opts.sleeptime)
continue
- job = self.create_job(task.data)
+ job = BuildJob(task.data, self.opts)
setproctitle("worker-{0} {1} Task: {2}".format(
- self.opts.build_groups[self.group_id]["name"],
- self.worker_num,
- job.task_id))
+ self.opts.build_groups[self.group_id]["name"],
+ self.worker_num,
+ job.build_id
+ ))
# Checking whether the build is not cancelled
if not self.starting_build(job):
@@ -603,20 +562,16 @@ class Worker(multiprocessing.Process):
job.project_owner, job.project_name)
}
- mr = mockremote.MockRemote(
+ mr = MockRemote(
builder=ip,
- timeout=job.timeout,
- destdir=job.destdir,
- chroot=job.chroot,
+ job=job,
cont=True,
recurse=True,
repos=chroot_repos,
macros=macros,
lock=self.lock,
do_sign=self.opts.do_sign,
- build_id=job.build_id,
- buildroot_pkgs=job.buildroot_pkgs,
- callback=mockremote.CliLogCallBack(
+ callback=CliLogCallBack(
quiet=True, logfn=chrootlogfile),
front_url=self.opts.frontend_base_url,
)
diff --git a/backend/backend/job.py b/backend/backend/job.py
new file mode 100644
index 0000000..693651d
--- /dev/null
+++ b/backend/backend/job.py
@@ -0,0 +1,88 @@
import copy
import os


class BuildJob(object):
    """
    A single build task received from the frontend, combined with
    worker-level configuration (result paths, default timeout).
    """

    def __init__(self, task_data, worker_opts):
        """
        Creates build job object

        :param dict task_data: dictionary with the following fields
            (based on frontend.models.Build)::

            - pkgs: list of space separated urls of packages to build
            - repos: list of space separated additional repos
            - timeout: maximum allowed time of build, build will fail if exceeded  # unused
            - project_owner:
            - project_name:
            - submitter:

        :param worker_opts: worker options object with attributes::

            - destdir: worker root directory to store results
            - results_baseurl: root url to stored results
            - timeout: default worker timeout
        """
        # defaults, may be overwritten by fields present in task_data below
        self.timeout = worker_opts.timeout
        self.memory_reqs = None

        self.project_owner = None
        self.project_name = None
        self.submitter = None

        self.ended_on = None
        self.started_on = None
        self.submitted_on = None

        self.status = None
        self.chroot = None

        # TODO: validate update data
        for key, val in task_data.items():
            key = str(key)
            setattr(self, key, val)

        self.pkgs = [task_data["pkgs"]]  # just for now
        self.repos = [r for r in task_data["repos"].split(" ") if r.strip()]
        self.build_id = task_data["build_id"]

        # local directory where this job's build results are stored
        self.destdir = os.path.normpath(os.path.join(
            worker_opts.destdir,
            task_data["project_owner"],
            task_data["project_name"]
        ))

        # public url of the stored results (note trailing slash)
        self.results = u"/".join([
            worker_opts.results_baseurl,
            task_data["project_owner"],
            task_data["project_name"] + "/"
        ])

        self.pkg_version = ""
        self.built_packages = ""

    def update(self, data_dict):
        """
        Updates job attributes in place.

        :param dict data_dict: mapping of attribute name -> new value
        """
        # TODO: validate update data
        self.__dict__.update(data_dict)

    def to_dict(self):
        """
        :return dict: dictified build job; includes an ``id`` alias for
            ``build_id`` as expected by the frontend update API
        """
        result = copy.deepcopy(self.__dict__)
        result["id"] = self.build_id

        return result

    def __unicode__(self):
        # BUGFIX: the original passed self.__dict__ as a single positional
        # argument, so str.format() raised KeyError for the named
        # placeholders; expand it into keyword arguments instead.
        return u"BuildJob<id: {build_id}, owner: {project_owner}, " \
               u"project: {project_name},>".format(**self.__dict__)
diff --git a/backend/backend/mockremote.py b/backend/backend/mockremote.py
index 0dfb7f1..919fbd9 100755
--- a/backend/backend/mockremote.py
+++ b/backend/backend/mockremote.py
@@ -217,12 +217,12 @@ class CliLogCallBack(DefaultCallBack):
class Builder(object):
def __init__(self, hostname, username,
- timeout, mockremote, buildroot_pkgs):
+ timeout, chroot, mockremote, buildroot_pkgs):
self.hostname = hostname
self.username = username
self.timeout = timeout
- self.chroot = mockremote.chroot
+ self.chroot = chroot
self.repos = mockremote.repos
self.mockremote = mockremote
@@ -536,21 +536,25 @@ class MockRemote(object):
# mock remote now do too much things
# idea: send events according to the build progress to handler
- def __init__(self, builder=None, user=DEF_USER, timeout=DEF_TIMEOUT,
- destdir=DEF_DESTDIR, chroot=DEF_CHROOT, cont=False,
- recurse=False, repos=None, callback=None,
+ def __init__(self, builder=None, user=DEF_USER, job=None,
+ cont=False, recurse=False, repos=None, callback=None,
remote_basedir=DEF_REMOTE_BASEDIR, remote_tempdir=None,
- macros=None, lock=None, do_sign=False, build_id=None,
- buildroot_pkgs=DEF_BUILDROOT_PKGS, front_url=None):
+ macros=None, lock=None, do_sign=False, front_url=None):
"""
:param builder: builder hostname
:param user: user to run as/connect as on builder systems
- :param timeout: ssh timeout
- :param destdir: target directory to put built packages
- :param chroot: chroot config name/base to use in the mock build
- (e.g.: fedora20_i386 )
+ :param backend.job.BuildJob job: Job object with the following attributes::
+ :ivar timeout: ssh timeout
+ :ivar destdir: target directory to put built packages
+ :ivar chroot: chroot config name/base to use in the mock build
+ (e.g.: fedora20_i386 )
+ :ivar buildroot_pkgs: whitespace separated string with additional
+ packages that should present during build
+ :ivar build_id: copr build.id
+
+
:param cont: if a pkg fails to build, continue to the next one
:param bool recurse: if more than one pkg and it fails to build,
try to build the rest and come back to it
@@ -566,8 +570,7 @@ class MockRemote(object):
Copr backend process
:param bool do_sign: enable package signing, require configured
signer host and correct /etc/sign.conf
- :param buildroot_pkgs: whitespace separated string with additional
- packages that should present during build
+
:param str front_url: url to the copr frontend
"""
@@ -576,9 +579,10 @@ class MockRemote(object):
repos = DEF_REPOS
if macros is None:
macros = DEF_MACROS
- self.destdir = destdir
- self.chroot = chroot
+
+ self.job = job
self.repos = repos
+
self.cont = cont
self.recurse = recurse
self.callback = callback
@@ -593,9 +597,11 @@ class MockRemote(object):
self.callback = DefaultCallBack()
self.callback.log("Setting up builder: {0}".format(builder))
- self.builder = Builder(builder, user, timeout, self, buildroot_pkgs)
+ self.builder = Builder(hostname=builder, username=user, chroot=self.job.chroot,
+ timeout=self.job.timeout or DEF_TIMEOUT,
+ mockremote=self, buildroot_pkgs=self.job.buildroot_pkgs)
- if not self.chroot:
+ if not self.job.chroot:
raise MockRemoteError("No chroot specified!")
self.failed = []
@@ -606,19 +612,19 @@ class MockRemote(object):
def _get_pkg_destpath(self, pkg):
s_pkg = os.path.basename(pkg)
pdn = s_pkg.replace(".src.rpm", "")
- resdir = "{0}/{1}/{2}".format(self.destdir, self.chroot, pdn)
+ resdir = "{0}/{1}/{2}".format(self.job.destdir, self.job.chroot, pdn)
resdir = os.path.normpath(resdir)
return resdir
def add_pubkey(self, chroot_dir):
"""
Adds pubkey.gpg with public key to ``chroot_dir``
- using `copr_username` and `copr_projectname` from self.macros.
+ using `copr_username` and `copr_projectname` from self.job.
"""
self.callback.log("Retrieving pubkey ")
# TODO: sign repodata as well ?
- user = self.macros["copr_username"]
- project = self.macros["copr_projectname"]
+ user = self.job.project_owner
+ project = self.job.project_name
pubkey_path = os.path.join(chroot_dir, "pubkey.gpg")
try:
#TODO: uncomment this when key revoke/change will be implemented
@@ -638,7 +644,7 @@ class MockRemote(object):
def sign_built_packages(self, chroot_dir, pkg):
"""
Sign built rpms
- using `copr_username` and `copr_projectname` from self.macros
+ using `copr_username` and `copr_projectname` from self.job
by means of obs-sign. If user builds doesn't have a key pair
at sign service, it would be created through ``copr-keygen``
@@ -651,9 +657,8 @@ class MockRemote(object):
format(pkg, chroot_dir))
try:
- sign_rpms_in_dir(self.macros["copr_username"],
- self.macros["copr_projectname"],
- #os.path.join(chroot_dir, source_basename),
+ sign_rpms_in_dir(self.job.project_owner,
+ self.job.project_name,
get_target_dir(chroot_dir, pkg),
callback=self.callback)
except Exception as e:
@@ -721,7 +726,7 @@ class MockRemote(object):
# mockchain makes things with the chroot appended - so suck down
# that pkg subdir from w/i that location
chroot_dir = os.path.normpath(
- os.path.join(self.destdir, self.chroot))
+ os.path.join(self.job.destdir, self.job.chroot))
d_ret, d_out, d_err = self.builder.download(pkg, chroot_dir)
if not d_ret:
@@ -737,7 +742,7 @@ class MockRemote(object):
# destdir/chroot
if not os.path.exists(chroot_dir):
os.makedirs(
- os.path.join(self.destdir, self.chroot))
+ os.path.join(self.job.destdir, self.job.chroot))
self.log_to_file_safe(
os.path.join(chroot_dir, "mockchain.log"),
@@ -760,7 +765,7 @@ class MockRemote(object):
os.path.basename(pkg)))
else:
msg = "Error building {0}\nSee logs/results in {1}" \
- .format(os.path.basename(pkg), self.destdir)
+ .format(os.path.basename(pkg), self.job.destdir)
if not self.cont:
raise MockRemoteError(msg)
@@ -775,14 +780,14 @@ class MockRemote(object):
built_pkgs.append(pkg)
# createrepo with the new pkgs
- self.callback.log("Createrepo:: macros: {}; front url: {}; path: {}".format(
- self.macros, self.front_url, chroot_dir
+ self.callback.log("Createrepo:: owner: {}; project: {}; front url: {}; path: {}".format(
+ self.job.project_owner, self.job.project_name, self.front_url, chroot_dir
))
_, _, err = createrepo(
path=chroot_dir,
front_url=self.front_url,
- username=self.macros["copr_username"],
- projectname=self.macros["copr_projectname"],
+ username=self.job.project_owner,
+ projectname=self.job.project_name,
lock=self.lock,
)
if err.strip():
@@ -906,12 +911,19 @@ def main(args):
# setup our callback
callback = CliLogCallBack(logfn=opts.logfile, quiet=opts.quiet)
# our mockremote instance
+
+ class JobClass(object):
+ __slots__ = ["timeout", "destdir", "chroot"]
+
+ job = JobClass()
+ job.timeout = opts.timeout
+ job.destdir = opts.destdir
+ job.chroot = opts.chroot
+
mr = MockRemote(
+ job=job,
builder=opts.builder,
user=opts.user,
- timeout=opts.timeout,
- destdir=opts.destdir,
- chroot=opts.chroot,
cont=opts.cont,
recurse=opts.recurse,
repos=opts.repos,
@@ -935,7 +947,7 @@ def main(args):
mr.build_pkgs(pkgs)
if not opts.quiet:
- print("Output written to: {0}".format(mr.destdir))
+ print("Output written to: {0}".format(opts.destdir))
except MockRemoteError as e:
sys.stderr.write("Error on build:\n")
diff --git a/backend/copr-be.conf.example b/backend/copr-be.local.conf
similarity index 95%
copy from backend/copr-be.conf.example
copy to backend/copr-be.local.conf
index 0ae59bc..3885b04 100644
--- a/backend/copr-be.conf.example
+++ b/backend/copr-be.local.conf
@@ -66,7 +66,7 @@ group0_max_workers=8
# directory where results are stored
# should be accessible from web using 'results_baseurl' URL
# no default
-destdir=/var/lib/copr/public_html/results
+destdir=~/public_html/results
# how long (in seconds) backend should wait before query frontends
# for new tasks in queue
@@ -78,10 +78,10 @@ num_workers=5
# path to log file
# default is /var/log/copr/backend.log
-logfile=/var/log/copr/backend.log
+logfile=/tmp/copr/backend.log
# default is /var/log/copr/workers/
-worker_logdir=/var/log/copr/workers/
+worker_logdir=/tmp/log/copr/workers/
# exit on worker failure
# default is false
@@ -96,6 +96,9 @@ worker_logdir=/var/log/copr/workers/
# signer host and correct /etc/sign.conf
#do_sign=false
+verbose=true
+
+
[builder]
# default is 1800
timeout=3600
diff --git a/backend/copr-be.py b/backend/copr-be.py
index 4673e5e..0869059 100755
--- a/backend/copr-be.py
+++ b/backend/copr-be.py
@@ -63,11 +63,11 @@ class CoprJobGrab(multiprocessing.Process):
self.opts = opts
self.events = events
- self.task_queue = []
+ self.task_queues = []
for group in self.opts.build_groups:
- self.task_queue.append(Queue("copr-be-{0}".format(
+ self.task_queues.append(Queue("copr-be-{0}".format(
str(group["id"]))))
- self.task_queue[group["id"]].connect()
+ self.task_queues[group["id"]].connect()
self.added_jobs = []
self.lock = lock
@@ -103,7 +103,7 @@ class CoprJobGrab(multiprocessing.Process):
if arch in group["archs"]:
self.added_jobs.append(task["task_id"])
task_obj = Task(task)
- self.task_queue[group["id"]].enqueue(task_obj)
+ self.task_queues[group["id"]].enqueue(task_obj)
count += 1
break
if count:
@@ -115,7 +115,7 @@ class CoprJobGrab(multiprocessing.Process):
for action in r_json["actions"]:
ao = Action(self.events, action, self.lock, destdir=self.opts.destdir,
- frontend_callback=FrontendCallback(self.opts),
+ frontend_callback=FrontendCallback(self.opts, self.events),
front_url=self.opts.frontend_base_url)
ao.run()
@@ -146,22 +146,27 @@ class CoprLog(multiprocessing.Process):
if not os.path.exists(logdir):
os.makedirs(logdir, mode=0o750)
+ def setup_log_handler(self):
+ sys.stderr.write("Running setup handler {} \n".format(self.opts))
# setup a log file to write to
logging.basicConfig(filename=self.opts.logfile, level=logging.DEBUG)
+ self.log({"when": time.time(), "who": self.__class__.__name__, "what": "Logger iniated"})
+
def log(self, event):
when = time.strftime("%F %T", time.gmtime(event["when"]))
msg = "{0} : {1}: {2}".format(when,
event["who"],
event["what"].strip())
-
try:
if self.opts.verbose:
sys.stderr.write("{0}\n".format(msg))
sys.stderr.flush()
logging.debug(msg)
+
except (IOError, OSError) as e:
+
sys.stderr.write("Could not write to logfile {0} - {1}\n".format(
self.logfile, e))
@@ -169,6 +174,7 @@ class CoprLog(multiprocessing.Process):
# what:str}
def run(self):
setproctitle.setproctitle("CoprLog")
+ self.setup_log_handler()
abort = False
try:
while not abort:
diff --git a/frontend/coprs_frontend/coprs/views/backend_ns/backend_general.py b/frontend/coprs_frontend/coprs/views/backend_ns/backend_general.py
index ba5ed04..e03ec42 100644
--- a/frontend/coprs_frontend/coprs/views/backend_ns/backend_general.py
+++ b/frontend/coprs_frontend/coprs/views/backend_ns/backend_general.py
@@ -79,8 +79,9 @@ def update():
i -= 1
exc_info = sys.exc_info()[2]
time.sleep(5)
+
if i != -100:
- raise LockError, None, exc_info
+ raise LockError(None).with_traceback(exc_info)
result.update({"updated_{0}_ids".format(typ): list(existing.keys()),
"non_existing_{0}_ids".format(typ): non_existing_ids})
9 years, 6 months
[copr] master: Automatic commit of package [python-copr] release [1.52-1]. (b15120a)
by vgologuz@fedoraproject.org
Repository : http://git.fedorahosted.org/cgit/copr.git
On branch : master
>---------------------------------------------------------------
commit b15120a346c96de51d608fecc19f9a0f1356a1d5
Author: Valentin Gologuzov <vgologuz(a)redhat.com>
Date: Mon Nov 3 13:34:22 2014 +0100
Automatic commit of package [python-copr] release [1.52-1].
>---------------------------------------------------------------
python/python-copr.spec | 5 ++++-
rel-eng/packages/python-copr | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/python/python-copr.spec b/python/python-copr.spec
index 4b936e0..a6fa0c8 100644
--- a/python/python-copr.spec
+++ b/python/python-copr.spec
@@ -10,7 +10,7 @@
%endif
Name: python-copr
-Version: 1.51
+Version: 1.52
Release: 1%{?dist}
Summary: Python interface for Copr
@@ -165,6 +165,9 @@ popd
%endif
%changelog
+* Mon Nov 03 2014 Valentin Gologuzov <vgologuz(a)redhat.com> 1.52-1
+- [python-copr] removed log config from client
+
* Tue Oct 07 2014 Valentin Gologuzov <vgologuz(a)redhat.com> 1.51-1
- [python-copr, cli] test coverage
- [python-copr, cli] updating copr-cli to use python-copr
diff --git a/rel-eng/packages/python-copr b/rel-eng/packages/python-copr
index 1f63dff..bb37ba2 100644
--- a/rel-eng/packages/python-copr
+++ b/rel-eng/packages/python-copr
@@ -1 +1 @@
-1.51-1 python/
+1.52-1 python/
9 years, 6 months