remove code which referenced the old CrabCache. Fix dmwm#5113 and fix d…
belforte committed Nov 19, 2021
1 parent c0d33e0 commit ced6b07
Showing 4 changed files with 16 additions and 65 deletions.
18 changes: 4 additions & 14 deletions src/python/CRABClient/ClientUtilities.py
```diff
@@ -234,20 +234,10 @@ def uploadlogfile(logger, proxyfilename, taskname=None, logfilename=None, logpat
     cacheurl = server_info(crabserver=crabserver, subresource='backendurls')['cacheSSL']
 
     logger.info("Uploading log file...")
-    if 'S3' in cacheurl.upper():
-        objecttype = 'clientlog'
-        uploadToS3(crabserver=crabserver, filepath=logpath, objecttype=objecttype, taskname=taskname, logger=logger)
-        logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype=objecttype, taskname=taskname, logger=logger)
-    else:
-        cacheurldict = {'endpoint': cacheurl, "pycurl": True}
-        ufc = UserFileCache(cacheurldict)
-        logger.debug("cacheURL: %s\nLog file name: %s" % (cacheurl, logfilename))
-        ufc.uploadLog(logpath, logfilename)
-        logfileurl = cacheurl + '/logfile?name='+str(logfilename)
-        if not username:
-            from CRABClient.UserUtilities import getUsername
-            username = getUsername(proxyFile=proxyfilename, logger=logger)
-        logfileurl += '&username='+str(username)
+    objecttype = 'clientlog'
+    uploadToS3(crabserver=crabserver, filepath=logpath, objecttype=objecttype, taskname=taskname, logger=logger)
+    logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype=objecttype, taskname=taskname, logger=logger)
 
     logger.info("Log file URL: %s" % (logfileurl))
     logger.info("%sSuccess%s: Log file uploaded successfully." % (colors.GREEN, colors.NORMAL))
```

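Read together with the unchanged context, the surviving upload path is a straight line through the two S3 helpers. Below is a reassembled sketch of the post-commit flow, using only lines visible in the hunk; the names `crabserver`, `logpath`, `taskname` and `logger` come from the surrounding function, and the helper imports are assumed from the rest of the file.

```python
# Reassembled, post-commit S3-only flow of uploadlogfile() (sketch; the
# uploadToS3/getDownloadUrlFromS3 imports are assumed from the full file).
cacheurl = server_info(crabserver=crabserver, subresource='backendurls')['cacheSSL']

logger.info("Uploading log file...")
objecttype = 'clientlog'
uploadToS3(crabserver=crabserver, filepath=logpath, objecttype=objecttype,
           taskname=taskname, logger=logger)
# the old UFC branch built the URL by hand from cacheurl; the S3 path asks
# the server for a (pre-signed) download URL instead
logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype=objecttype,
                                  taskname=taskname, logger=logger)

logger.info("Log file URL: %s" % (logfileurl))
```

One side effect visible in the hunk: with the UFC branch gone, the cacheSSL lookup kept in the unchanged context no longer feeds anything, at least within these lines.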
7 changes: 1 addition & 6 deletions src/python/CRABClient/Commands/preparelocal.py
```diff
@@ -68,12 +68,7 @@ def getInputFiles(self):
 
         inputsFilename = os.path.join(os.getcwd(), 'InputFiles.tar.gz')
         if status == 'UPLOADED':
-            filecacheurl = getColumn(crabDBInfo, 'tm_cache_url')
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            self.logger.debug("Downloading and extracting 'dry-run-sandbox.tar.gz' from %s" % filecacheurl)
-            ufc.downloadLog('dry-run-sandbox.tar.gz', output=os.path.join(os.getcwd(), 'dry-run-sandbox.tar.gz'))
-            with tarfile.open('dry-run-sandbox.tar.gz') as tf:
-                tf.extractall()
+            raise ClientException('Currently crab upload only works for tasks successfully submitted')
         elif status == 'SUBMITTED':
             webdir = getProxiedWebDir(crabserver=self.crabserver, task=taskname,
                                       logFunction=self.logger.debug)
```
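The practical effect: a task whose sandbox was only uploaded, never submitted, now fails fast in `crab preparelocal` instead of fetching `dry-run-sandbox.tar.gz` from the old UserFileCache. A minimal sketch of the new branch follows; the `ClientException` import path is an assumption based on where CRABClient keeps its exceptions.

```python
# Sketch of the new UPLOADED branch (import path assumed, not shown in the hunk)
from CRABClient.ClientExceptions import ClientException

if status == 'UPLOADED':
    # the dry-run sandbox used to be pulled from the old UserFileCache here;
    # with that download path removed there is nothing to fetch locally
    raise ClientException('Currently crab upload only works for tasks successfully submitted')
```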
10 changes: 2 additions & 8 deletions src/python/CRABClient/Commands/submit.py
```diff
@@ -15,8 +15,6 @@
 if sys.version_info < (3, 0):
     from urllib import urlencode, quote
 
-
-import CRABClient.Emulator
 from CRABClient.ClientUtilities import DBSURLS
 from CRABClient.Commands.SubCommand import SubCommand
 from CRABClient.ClientMapping import parametersMapping, getParamDefaultValue
@@ -379,12 +377,8 @@ def executeTestRun(self, filecacheurl, uniquerequestname):
         tmpDir = tempfile.mkdtemp()
         self.logger.info('Created temporary directory for dry run sandbox in %s' % tmpDir)
         os.chdir(tmpDir)
-        if 'S3' in filecacheurl.upper():
-            downloadFromS3(crabserver=self.crabserver, filepath=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'),
-                           objecttype='runtimefiles', taskname=uniquerequestname, logger=self.logger)
-        else:
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            ufc.downloadLog('dry-run-sandbox.tar.gz', output=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'))
+        downloadFromS3(crabserver=self.crabserver, filepath=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'),
+                       objecttype='runtimefiles', taskname=uniquerequestname, logger=self.logger)
         for name in ['dry-run-sandbox.tar.gz', 'InputFiles.tar.gz', 'CMSRunAnalysis.tar.gz', 'sandbox.tar.gz']:
             tf = tarfile.open(os.path.join(tmpDir, name))
             tf.extractall(tmpDir)
```
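With the UFC fallback gone, the dry-run download in `executeTestRun` is unconditional. The sketch below reassembles it from the hunk as a standalone function; `fetch_dryrun_sandbox` is a hypothetical name, and the `downloadFromS3` import path is an assumption (the helper itself appears in the diff).

```python
import os
import tarfile
import tempfile

from ServerUtilities import downloadFromS3  # assumed import path

def fetch_dryrun_sandbox(crabserver, uniquerequestname, logger):
    """Hypothetical wrapper around the post-commit download path."""
    tmpDir = tempfile.mkdtemp()
    logger.info('Created temporary directory for dry run sandbox in %s' % tmpDir)
    os.chdir(tmpDir)
    downloadFromS3(crabserver=crabserver,
                   filepath=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'),
                   objecttype='runtimefiles', taskname=uniquerequestname,
                   logger=logger)
    # every tarball needed to run the job locally gets unpacked into tmpDir
    for name in ['dry-run-sandbox.tar.gz', 'InputFiles.tar.gz',
                 'CMSRunAnalysis.tar.gz', 'sandbox.tar.gz']:
        tf = tarfile.open(os.path.join(tmpDir, name))
        tf.extractall(tmpDir)
        tf.close()  # not in the visible hunk; added so the sketch is tidy
    return tmpDir
```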
46 changes: 9 additions & 37 deletions src/python/CRABClient/JobType/UserTarball.py
```diff
@@ -18,7 +18,6 @@
 import hashlib
 import uuid
 
-import CRABClient.Emulator
 from CRABClient.ClientMapping import configParametersInfo
 from CRABClient.JobType.ScramEnvironment import ScramEnvironment
 from CRABClient.ClientUtilities import colors, BOOTSTRAP_CFGFILE, BOOTSTRAP_CFGFILE_PKL
@@ -279,42 +278,15 @@ def upload(self, filecacheurl=None):
               (archiveName, archiveSize, filecacheurl))
         self.logger.debug(msg)
 
-        if 'S3' in filecacheurl.upper():
-            # use S3
-            # generate a 32char hash like UserFileCache used to do
-            hashkey = calculateChecksum(archiveName, exclude=NEW_USER_SANDBOX_EXCLUSIONS)
-            # the ".tar.gz" suffix here is forced by other places in the client which add it when
-            # storing tarball name in task table. Not very elegant to need to hardcode in several places.
-            cachename = "%s.tar.gz" % hashkey
-            # current code requires a taskname to extract username. Any dummy one will do
-            # next version of RESTCache will get username from cmsweb FE headers
-            uploadToS3(crabserver=self.crabserver, objecttype='sandbox', filepath=archiveName,
-                       tarballname=cachename, logger=self.logger)
-        else:
-            # old way using UFC
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            t1 = time.time()
-            result = ufc.upload(archiveName, excludeList=NEW_USER_SANDBOX_EXCLUSIONS)
-            ufcSeconds = int(time.time()-t1)
-            if 'hashkey' not in result:
-                self.logger.error("Failed to upload archive: %s" % str(result))
-                raise CachefileNotFoundException
-            hashkey = str(result['hashkey'])
-            # upload a copy to S3 dev as well, just to stress it a bit, this never raises
-            s3report = testS3upload(self.s3tester, archiveName, hashkey, self.logger)
-            # report also how long it took uploading to UFC (which surely worked if we are here)
-            s3report['ufcseconds'] = ufcSeconds
-            # upload S3 test report to crabcache
-            reportFile = '/tmp/crabs3report.' + uuid.uuid4().hex
-            with open(reportFile, 'w') as fp:
-                json.dump(s3report, fp)
-            reportName = 'S3-' + s3report['timestamp'] + ':s3report.json'
-            try:
-                ufc.uploadLog(reportFile, reportName)
-                self.logger.debug('Report of S3 upload stored on CrabCache as %s', reportName)
-            except Exception as e:
-                self.logger.debug(str(e))
-            os.remove(reportFile)
+        # generate a 32char hash like old UserFileCache used to do
+        hashkey = calculateChecksum(archiveName, exclude=NEW_USER_SANDBOX_EXCLUSIONS)
+        # the ".tar.gz" suffix here is forced by other places in the client which add it when
+        # storing tarball name in task table. Not very elegant to need to hardcode in several places.
+        cachename = "%s.tar.gz" % hashkey
+        # current code requires a taskname to extract username. Any dummy one will do
+        # next version of RESTCache will get username from cmsweb FE headers
+        uploadToS3(crabserver=self.crabserver, objecttype='sandbox', filepath=archiveName,
+                   tarballname=cachename, logger=self.logger)
         return hashkey
```


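The kept branch names the sandbox by a content hash, so identical tarballs map to the same cache object across submissions. The sketch below is illustrative only: `sketch_checksum` is a hypothetical stand-in for the real `calculateChecksum`, which hashes the tarball contents while skipping the `NEW_USER_SANDBOX_EXCLUSIONS` members; only the naming idea from the comments above is shown.

```python
# Illustrative only: how a 32-char content key can name a sandbox, in the
# spirit of the "32char hash" comment in the diff (not the real algorithm).
import hashlib

def sketch_checksum(path, nchars=32):
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):  # read in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest()[:nchars]  # truncate to a 32-char key

# the ".tar.gz" suffix is hardcoded because other parts of the client
# append it when storing the tarball name in the task table
cachename = "%s.tar.gz" % sketch_checksum('sandbox.tar.gz')
```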
