Update a file in Python

60 Python code examples are found related to "update files". You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

Example 1

def keepJsonFilesWhenUpdate(self, currentDir, tempUpdateDir, *args):
    """ Check in the given folder if we have custom json files and keep them when we install a new update.
        It just checks if there are user-created json files and copies them to the temporary extracted update folder,
        so that when the install overwrites all files, they will be copied (restored) again.
    """
    newUpdateList = []
    # list all new json files:
    for newRoot, newDirectories, newFiles in os.walk(tempUpdateDir):
        for newItem in newFiles:
            if newItem.endswith('.json'):
                newUpdateList.append(newItem)
    # check if a current json file is a custom file created by the user and copy it to the new update directory to avoid overwriting it:
    for currentRoot, currentDirectories, currentFiles in os.walk(currentDir):
        for currentItem in currentFiles:
            if currentItem.endswith('.json'):
                if not currentItem in newUpdateList:
                    # found a custom file, so copy it to keep it when installing the new update
                    shutil.copy2(os.path.join(currentRoot, currentItem), tempUpdateDir)

Example 2

def update_recent_files_menu(self):
    if self.recentFiles:
        menu = tk.Menu(self.fileMenu)
        i = 1
        for filename in self.recentFiles:
            if filename != self.editor.filename:
                menu.add_command(label="{}. {}".format(i, filename), underline=0,
                                 command=lambda filename=filename: self.load(filename))
                i += 1
        self.fileMenu.entryconfigure(OPEN_RECENT, menu=menu)
        self.fileMenu.entryconfigure(OPEN_RECENT, state=tk.NORMAL if i > 1 else tk.DISABLED)
    else:
        self.fileMenu.entryconfigure(OPEN_RECENT, state=tk.DISABLED)

Example 3

def update_wsnum_in_files(self, vernum):
    """
    With the given version number ```vernum```,
    update the source's version number, and replace in the file hashmap.
    The version number is in the CHECKSUMS file.
    :param vernum:
    :return:
    """
    self.version_num = vernum
    # replace the WSNUMBER in the url paths with the real WS###
    for f in self.files:
        url = self.files[f].get('url')
        url = re.sub(r'WSNUMBER', self.version_num, url)
        self.files[f]['url'] = url
        LOG.debug("Replacing WSNUMBER in %s with %s", f, self.version_num)
    # also the letter file - keep this so we know the version number
    # self.files['checksums']['file'] = re.sub(
    #     r'WSNUMBER', self.version_num, self.files['checksums']['file'])

Example 4

def remove_old_files_and_update_filesnames(filenames):
    all_directories = os.listdir(csvpath)
    # aggregate cpu for collectd version < 5.5
    aggregate_cpu = False
    # remove old csv files in datadir
    remove_old_files(os.path.join(home_path, data_dir), 'csv')
    for each_dir in all_directories:
        # remove old collectd log files
        remove_old_files(os.path.join(csvpath, each_dir), None)
        if "disk" in each_dir:
            filenames[each_dir + "/disk_octets-"] = [each_dir + '_DiskWrite', each_dir + '_DiskRead']
        if "interface" in each_dir:
            filenames[each_dir + "/if_octets-"] = [each_dir + '_NetworkIn', each_dir + '_NetworkOut']
    for fEntry in os.walk(os.path.join(csvpath)):
        if "cpu-" in fEntry[0]:
            aggregate_cpu = True
            filenames['aggregation-cpu-average/cpu-system-'] = ['CPU']
    return aggregate_cpu

Example 5

def update_files(self):
    ls = []
    for name in self.listdir():
        absname = os.path.realpath(os.path.join(self.folder, name))
        try:
            st = os.stat(absname)
        except EnvironmentError, err:
            if err.errno != errno.ENOENT:
                raise
        else:
            if not stat.S_ISREG(st.st_mode):
                continue
            fid = self.get_file_id(st)
            ls.append((fid, absname))
    # check existent files

Example 6

def UpdateUsingSpecificFiles(self): """Updates and deploys new app versions based on given config files.""" rpcserver = self._GetRpcServer() all_files = [self.basepath] + self.args has_python25_version = False for yaml_path in all_files: file_name = os.path.basename(yaml_path) self.basepath = os.path.dirname(yaml_path) if not self.basepath: self.basepath = '.' server_yaml = self._ParseAppInfoFromYaml(self.basepath, os.path.splitext(file_name)[0]) if server_yaml.runtime == 'python': has_python25_version = True if not server_yaml.server and file_name != 'app.yaml': ErrorUpdate("Error: 'server' parameter not specified in %s" % yaml_path) continue self.UpdateVersion(rpcserver, self.basepath, server_yaml, file_name) if has_python25_version: MigratePython27Notice()

Example 7

def update_conf_files(): """ Downloads from nordvpn.com all the .ovpn files """ from bin.pathUtil import CURRENT_PATH logger.debug("Missing files, trying to download the .ovpn files") ovpn_download_link = '//downloads.nordcdn.com/configs/archives/servers/ovpn.zip' zip_filename = CURRENT_PATH + 'ovpn.zip' # downloading the zipped files r = requests.get(ovpn_download_link, allow_redirects=True) with open(zip_filename, 'wb') as zip_f: zip_f.write(r.content) # unzipping files import zipfile with zipfile.ZipFile(zip_filename, "r") as zip_ref: zip_ref.extractall(CURRENT_PATH) # removing zip from os import remove remove(zip_filename) logger.debug("Finished preparing ovpn files")

Example 8

def update_files_dictionary(self, *args): super(SPAMiddleware, self).update_files_dictionary(*args) index_page_suffix = '/' + self.index_name index_name_length = len(self.index_name) static_prefix_length = len(settings.STATIC_URL) - 1 directory_indexes = {} for url, static_file in self.files.items(): if url.endswith(index_page_suffix): # For each index file found, add a corresponding URL->content # mapping for the file's parent directory, # so that the index page is served for # the bare directory URL ending in '/'. parent_directory_url = url[:-index_name_length] directory_indexes[parent_directory_url] = static_file # remember the root page for any other unrecognised files # to be frontend-routed self.spa_root = static_file else: # also serve static files on / # e.g. when /my/file.png is requested, serve /static/my/file.png directory_indexes[url[static_prefix_length:]] = static_file self.files.update(directory_indexes)

Example 9

def update_recent_files(self):
    for i in range(5):
        recent_file_action = self.app.findChild(qg.QAction, 'itemRecent%d' % i)
        recent_file_action.setVisible(False)
    if len(self.recent_files_service.get_files()) > 0:
        recent_empty_action = self.app.findChild(qg.QAction, 'itemRecentEmpty')
        recent_empty_action.setVisible(False)
    for i, filepath in enumerate(self.recent_files_service.get_files()):
        recent_file_action = self.app.findChild(qg.QAction, 'itemRecent%d' % i)
        recent_file_action.setVisible(True)
        recent_file_action.setText(filepath)
        recent_file_action.triggered.disconnect()
        recent_file_action.triggered.connect(self.open_recent(filepath))

Example 10

def UpdateDefaultFiles(self): default_files = [('wordpress', initializer.wp_defaultFiles, initializer.wp_defaultFolders), ('joomla',initializer.joo_defaultFiles, initializer.joo_defaultFolders), ('drupal',initializer.dru_defaultFiles, initializer.dru_defaultFolders), ('moodle',initializer.moo_defaultFiles, initializer.moo_defaultFolders)] for cms_type,defaultFiles,defaultFolders in default_files : msg = "Updating "+cms_type+" default files" report.message(msg) p = subprocess.Popen("find "+self.cmsmapPath+ "/tmp/"+cms_type+" -type f -name '*.txt' -o -name '*.html' -o -name '*.sql'| sed 's|"+self.cmsmapPath+ "/tmp/"+cms_type+"||g'",stdout=subprocess.PIPE,shell=True, universal_newlines=True) output, error = p.communicate() f = open(defaultFiles, "a") f.write(output) f.close() msg = "Updating "+cms_type+" default folders" report.message(msg) p = subprocess.Popen("find "+self.cmsmapPath+ "/tmp/"+cms_type+" -maxdepth 2 -type d | sed 's|"+self.cmsmapPath+ "/tmp/"+cms_type+"||g'",stdout=subprocess.PIPE,shell=True, universal_newlines=True) output, error = p.communicate() f = open(defaultFolders, "a") f.write(output) f.close()

Example 11

def update_recent_files(self): """Update only the recent file list in settings file""" if self.error_lock == True: return # Import the init file as a python module init_module = runpy.run_path( self.settings_filename_with_path, init_globals = {"themes": themes, "data": data} ) # Update only the recent file list stored_sessions = self._parse_sessions(init_module["sessions"]) # Save the updated settings self.write_settings_file( init_module["main_window_side"], init_module["theme"], self.recent_files, stored_sessions, self.context_menu_functions )

Example 12

def getFilesToUpdate(self):
    """
    Search on each folder any differences
    :return: three items (auxiliary exploits, generic exploits, routers exploits)
    """
    local, remote = self.getJSONFiles()
    if local and remote and local['id'] != remote['id']:
        local = local['modules']
        remote = remote['modules']
        # auxiliary
        auxiliary = self.searchFilesOnFolder('auxiliary', local, remote)
        # generic
        generic = self.searchFilesOnFolder('generic', local, remote)
        # routers
        routers = self.searchFilesOnFolder('routers', local, remote)
        return auxiliary, generic, routers
    else:
        return [], [], []

Example 13

def update_files(targets, version, filename, search, replace):
    errors = False
    for target in targets:
        curr_file = find(filename, target)
        if curr_file is None:
            print("file missing: {}/{}".format(target, filename))
            continue
        with open(curr_file) as _file:
            text = _file.read()
        if version in text:
            print("{} already contains version {}".format(curr_file, version))
            errors = True
            continue
        with open(curr_file, "w") as _file:
            _file.write(re.sub(search, replace, text))
    if errors:
        sys.exit(1)

Example 14

def update_files(self, files):
    """Update files with collection of DatasetFile objects."""
    to_insert = []
    for new_file in files:
        existing_file = self.find_file(new_file.path)
        if existing_file is None:
            to_insert.append(new_file)
        else:
            existing_file.commit = new_file.commit
            existing_file._label = new_file._label
            existing_file.based_on = new_file.based_on
    self.files += to_insert

Example 15

def updateFiles(self, d_name): # Manifest and Userlist grabber try: QApplication.setOverrideCursor(QtCore.Qt.WaitCursor) page = urllib.request.urlopen( '//github.com/gaining/Resetter/tree/master/Resetter/usr/lib/resetter/data/' + d_name) soup = BeautifulSoup(page, 'html.parser', from_encoding=page.info().get_param('charset')) data = soup.findAll('tr', attrs={'class': 'js-navigation-item'}) for link in data: real_link = link.findAll('a') for a in real_link: if 'blob' in str(a): fname = str(a['href']).split('/')[-1] print(fname) file_data = urllib.request.urlopen( "//raw.githubusercontent.com/gaining/Resetter/master/Resetter" "/usr/lib/resetter/data/" + d_name + "/" + fname) output = file_data.read().decode() with open(d_name + "/" + fname, 'w') as f: f.write(output) except urllib.request.URLError as e: QApplication.restoreOverrideCursor() UsefulTools().showMessage("Failed", "Could not update " + d_name + " " + str(e.reason), QMessageBox.Critical) else: QApplication.restoreOverrideCursor() UsefulTools().showMessage("Done!", d_name + " directory has been updated", QMessageBox.Information)

Example 16

def update_external_files(self, records): """Update files linked to external storage.""" updated_files_paths = [] updated_datasets = {} for file_ in records: if file_.external: path = self.path / file_.path link = path.parent / os.readlink(path) pointer_file = self.path / link pointer_file = self._update_pointer_file(pointer_file) if pointer_file is not None: relative = os.path.relpath(pointer_file, path.parent) os.remove(path) os.symlink(relative, path) updated_files_paths.append(str(path)) updated_datasets[file_.dataset.short_name] = file_.dataset if not updated_files_paths: return self.repo.git.add(*updated_files_paths, force=True) self.repo.git.add(self.renku_pointers_path, force=True) commit = self.repo.index.commit( 'renku dataset: updated {} external files'.format( len(updated_files_paths) ) ) for dataset in updated_datasets.values(): for file_ in dataset.files: if str(self.path / file_.path) in updated_files_paths: file_.commit = commit file_._label = file_.default_label() dataset.to_yaml()

Example 17

def update_data_files(distribution):
    """Update package_data to catch changes during setup."""
    build_py = distribution.get_command_obj('build_py')
    distribution.data_files = get_data_files()
    # distribution.package_data = find_package_data()
    # re-init build_py options which load package_data
    build_py.finalize_options()

Example 18

def updateClearFiles(self):
    self.lClear.clear()
    self.lClear.setSelectionMode(QAbstractItemView.MultiSelection)
    for d in self.historyfiles:
        self.lClear.addItem(d)

Example 19

def update_files(self): ls = [] for name in self.filelist: absname = os.path.realpath(os.path.join(name)) try: st = os.stat(absname) except EnvironmentError as err: if err.errno != errno.ENOENT: raise else: if not stat.S_ISREG(st.st_mode): continue fid = self.get_file_id(st) ls.append((fid, absname)) # check existent files for fid, file in list(iter(self.files_map.items())): # next(iter(graph.items())) try: st = os.stat(file.name) except EnvironmentError as err: if err.errno == errno.ENOENT: self.unwatch(file, fid) else: raise else: if fid != self.get_file_id(st): # same name but different file (rotation); reload it. self.unwatch(file, fid) self.watch(file.name) # add new ones for fid, fname in ls: if fid not in self.files_map: self.watch(fname)

Example 20

def updateOutputFilesXMLWithSURLs4NG(self, experiment, siteWorkdir, jobId, outputFilesXML): """ Update the OutputFiles.xml file with SURLs """ status = False # open and read back the OutputFiles.xml file _filename = os.path.join(siteWorkdir, outputFilesXML) if os.path.exists(_filename): try: f = open(_filename, "r") except Exception, e: tolog("!!WARNING!!1990!! Could not open file %s: %s" % (_filename, e)) else: # get the metadata xmlIN = f.read() f.close() # update the XML xmlOUT = updateXMLWithSURLs(experiment, xmlIN, siteWorkdir, jobId, self.__jobrec, format='NG') # write the XML try: f = open(_filename, "w") except OSError, e: tolog("!!WARNING!!1990!! Could not open file %s: %s" % (_filename, e)) else:

Example 21

def update_recent_files(self, filename, populateMenu=True):
    if filename not in self.recentFiles:
        self.recentFiles.insert(0, filename)
        self.recentFiles = self.recentFiles[:MAX_RECENT_FILES]
    if populateMenu:
        self.update_recent_files_menu()

Example 22

def updateAllSavedLicFiles(window):
    # Useful for when too many new features accumulate in LicBinaryReader & Writer.
    # Use this to open each .lic file in the project, save it & close it.
    for root, unused, files in os.walk("D:\\LeJOS\\Instructions\\Creator"):
        for f in files:
            if f[-3:] == 'lic':
                fn = os.path.join(root, f)
                print "Trying to open %s" % fn
                window.locationOpen(fn)
                if window.instructions.licFileVersion != FileVersion:
                    window.fileSave()
                    print "Successful save %s" % fn
                window.fileClose()

Example 23

def update_manually_removed_files_since_last_run(self): """ Update files that have been removed from the downloads directory since the last run """ def update_manually_removed_files(transaction: sqlite3.Connection): files = {} query = "select stream_hash, download_directory, file_name from file where saved_file=1 " \ "and stream_hash is not null" for (stream_hash, download_directory, file_name) in transaction.execute(query).fetchall(): if download_directory and file_name: files[stream_hash] = download_directory, file_name return files def detect_removed(files): return [ stream_hash for stream_hash, (download_directory, file_name) in files.items() if not os.path.isfile(os.path.join(binascii.unhexlify(download_directory).decode(), binascii.unhexlify(file_name).decode())) ] def update_db_removed(transaction: sqlite3.Connection, removed): query = "update file set file_name=null, download_directory=null, saved_file=0 where stream_hash in {}" for cur in _batched_select(transaction, query, removed): cur.fetchall() stream_and_file = await self.db.run(update_manually_removed_files) removed = await self.loop.run_in_executor(None, detect_removed, stream_and_file) if removed: await self.db.run(update_db_removed, removed)

Example 24

def update_files(self) -> None:
    """Update files when the config changed."""
    if not config.val.content.host_blocking.lists:
        try:
            os.remove(self._local_hosts_file)
        except FileNotFoundError:
            pass
        except OSError as e:
            logger.exception("Failed to delete hosts file: {}".format(e))

Example 25

def update_files(self, url, ad_info): MAX_FILENAME_LENGTH = 245 ad_page_soup, ad_title, ad_submitter, ad_url = ( ad_info["ad_page_soup"], ad_info["ad_title"], ad_info["ad_submitter"], ad_info["ad_url"], ) # save url to file, so as not to send a message to them again with open( os.path.join(self.ad_links_folder, "WG Ad Links.csv"), "a", newline="", encoding="utf-8", ) as file_write: csv_file_write = csv.writer(file_write) csv_file_write.writerow([url, ad_submitter, ad_title]) # save a copy of the ad for offline viewing, in case the ad is deleted before the user can view it online max_ad_title_length = MAX_FILENAME_LENGTH - len(ad_submitter) - len(ad_url) if len(ad_title) > max_ad_title_length: ad_title = ad_title[: max_ad_title_length - 1] + "..." file_name = "{}-{}-{}".format(ad_submitter, ad_title, ad_url) try: with open( os.path.join(self.offline_ad_folder, file_name), "w", encoding="utf-8" ) as outfile: outfile.write(str(ad_page_soup)) except OSError as err: if err.errno == errno.ENAMETOOLONG: self.logger.exception( "File name of {} is too long, could not save this ad offline".format( file_name ) )

Example 26

def updateStateFiles(self, data, alat, alon, agid):
    """Update initial state files with *data*."""
    _, vegparam, snowbands = self.models[0].paramFromDB()
    veg = state.readVegetation("{0}/{1}".format(rpath.data, vegparam))
    bands, _ = state.readSnowbands("{0}/{1}".format(rpath.data, snowbands))
    for e, statefile in enumerate(self.statefiles):
        states, nlayer, nnodes, dateline = state.readStateFile(statefile)
        for var in data:
            x = state.readVariable(self.models[e], states, alat[var], alon[var],
                                   veg, bands, nlayer, var)
            states = state.updateVariable(self.models[e], states, x, data[var][:, e],
                                          alat[var], alon[var], agid, veg, bands, nlayer, var)
        state.writeStateFile(statefile, states, "{0}\n{1} {2}".format(
            dateline.strip(), nlayer, nnodes))

Example 27

def update_files(self): ls = [] for folder, name in self.listFiles(self.includeSubFolder): absname = os.path.realpath(os.path.join(folder, name)) if self.isExcluded(absname): continue try: st = os.stat(absname) except EnvironmentError as err: if err.errno != errno.ENOENT: raise else: if not stat.S_ISREG(st.st_mode): continue fid = self.get_file_id(st) ls.append((fid, absname)) # check existent files for fid, file in list(self._files_map.items()): try: st = os.stat(file.name) except EnvironmentError as err: if err.errno == errno.ENOENT: self.unwatch(file, fid) else: raise else: if fid != self.get_file_id(st): # same name but different file (rotation); reload it. self.unwatch(file, fid) self.watch(file.name) # add new ones for fid, fname in ls: if fid not in self._files_map: self.watch(fname)

Example 28

def update_status_files(blocking_events):
    # The files are served by Nginx
    return  # FIXME
    # This contains the last status change for every cc/test_name/input
    # that ever had a block/unblock event
    status = {k: v[-1] for k, v in blocking_events.items()}
    statusfile = conf.statusdir / f"status.json"
    d = dict(format=1, status=status)
    with statusfile.open("w") as f:
        ujson.dump(d, f)
    log.debug("Wrote %s", statusfile)

Example 29

def update_recent_files_list(self, new_file=None): "Load and update the recent files list and menus" rf_list = [] if os.path.exists(self.recent_files_path): with open(self.recent_files_path, 'r') as rf_list_file: rf_list = rf_list_file.readlines() if new_file: new_file = os.path.abspath(new_file) + '\n' if new_file in rf_list: rf_list.remove(new_file) # move to top rf_list.insert(0, new_file) # clean and save the recent files list bad_paths = [] for path in rf_list: if '\0' in path or not os.path.exists(path[0:-1]): bad_paths.append(path) rf_list = [path for path in rf_list if path not in bad_paths] ulchars = "1234567890ABCDEFGHIJK" rf_list = rf_list[0:len(ulchars)] try: with open(self.recent_files_path, 'w') as rf_file: rf_file.writelines(rf_list) except IOError as err: if not getattr(self.root, "recentfilelist_error_displayed", False): self.root.recentfilelist_error_displayed = True tkMessageBox.showerror(title='IDLE Error', message='Unable to update Recent Files list:\n%s' % str(err), parent=self.text) # for each edit window instance, construct the recent files menu for instance in self.top.instance_dict.keys(): menu = instance.recent_files_menu menu.delete(0, END) # clear, and rebuild: for i, file_name in enumerate(rf_list): file_name = file_name.rstrip() # zap \n # make unicode string to display non-ASCII chars correctly ufile_name = self._filename_to_unicode(file_name) callback = instance.__recent_file_callback(file_name) menu.add_command(label=ulchars[i] + " " + ufile_name, command=callback, underline=0)

Example 30

def updateVPNFiles(vpn_provider): # If the OVPN files aren't generated then they need to be updated with location info infoTrace("vpnproviders.py", "Updating VPN profiles for " + vpn_provider) # Get the list of VPN profile files if isUserDefined(vpn_provider): ovpn_connections = getAddonList(vpn_provider, "*.ovpn") else: ovpn_connections = getDownloadList(vpn_provider, "*.ovpn") # Open a translate file if allowViewSelection(vpn_provider): try: debugTrace("Opening translate file for " + vpn_provider) translate_file = open(getAddonPath(True, vpn_provider + "/TRANSLATE.txt"), 'w') debugTrace("Opened translate file for " + vpn_provider) except Exception as e: errorTrace("vpnproviders.py", "Couldn't open the translate file for " + vpn_provider) errorTrace("vpnproviders.py", str(e)) return False success = False for connection in ovpn_connections: # Update each ovpn file based on settings, etc success, translate_location, translate_server, server_count, proto = updateVPNFile(connection, vpn_provider) if not success: break if allowViewSelection(vpn_provider): # Update the translate file with the server info needed if server_count > 1: translate_server = translate_server + " & " + str(server_count - 1) + " more" translate_file.write(translate_location + "," + translate_server + " (" + proto.upper() + ")\n") if allowViewSelection(vpn_provider): translate_file.close() if success: # Flag that the files have been generated writeGeneratedFile(vpn_provider) return success

Example 31

def update_files_with_custom_filter(self):
    """
    Get the new file count with the user custom filter text
    :return: file count
    """
    self.available_records()
    self.on_dir_path()
    self.populate_entities_widget()

Example 32

def update_watched_files(self, reindex: bool = False) -> Dict: watched_files = {} # type: Dict[str, float] watched_files_crc = {} # type: Dict[str, str] if not self.watched_files or reindex: for r in self.root: for root, dirs, files in os.walk(r, topdown=True): dirs[:] = [d for d in dirs if d not in self.ignored_dirs] for file in files: file_path = os.path.join(root, file) _dir = os.path.dirname(file_path) if _dir not in self.ignored_dirs and not any([os.path.join(root, _dir).endswith('/{}'.format(ignored_dir)) or '/{}/'.format(ignored_dir) in os.path.join(root, _dir) for ignored_dir in self.ignored_dirs]) and any([file.endswith(ending) for ending in self.watched_file_endings]) and '/.' not in file_path: watched_files[file_path] = os.path.getmtime(file_path) watched_files_crc[file_path] = crc(file_path) if watched_files[file_path] != self.watched_files.get(file_path) else self.watched_files_crc.get(file_path, '') else: for file_path, mtime in self.watched_files.items(): try: watched_files[file_path] = os.path.getmtime(file_path) watched_files_crc[file_path] = crc(file_path) if watched_files[file_path] != self.watched_files.get(file_path) else self.watched_files_crc.get(file_path, '') except FileNotFoundError: pass if self.watched_files and self.watched_files != watched_files and self.watched_files_crc == watched_files_crc: self.watched_files = watched_files if self.watched_files and self.watched_files != watched_files: added = [k[((len(self.root[0]) if k.startswith(self.root[0]) else -1) + 1):] for k in watched_files.keys() if k not in self.watched_files.keys()] removed = [k[((len(self.root[0]) if k.startswith(self.root[0]) else -1) + 1):] for k in self.watched_files.keys() if k not in watched_files.keys()] updated = [k[((len(self.root[0]) if k.startswith(self.root[0]) else -1) + 1):] for k in watched_files.keys() if k in self.watched_files.keys() and self.watched_files[k] != watched_files[k]] self.watched_files = watched_files self.watched_files_crc = watched_files_crc return {'added': added, 'removed': removed, 'updated': updated} self.watched_files = watched_files self.watched_files_crc = watched_files_crc return {}

Example 33

def updateFilesData(self, configuration_type: str, version: int, files_data: List[str], file_names_without_extension: List[str]) -> Optional[FilesDataUpdateResult]: old_configuration_type = configuration_type # Keep converting the file until it's at one of the current versions. while (configuration_type, version) not in self._current_versions: if (configuration_type, version) not in self._upgrade_routes: # No version upgrade plug-in claims to be able to upgrade this file. return None new_type, new_version, upgrade_step = self._upgrade_routes[(configuration_type, version)] new_file_names_without_extension = [] # type: List[str] new_files_data = [] # type: List[str] for file_idx, file_data in enumerate(files_data): try: upgrade_step_result = upgrade_step(file_data, file_names_without_extension[file_idx]) except Exception: # Upgrade failed due to a coding error in the plug-in. Logger.logException("w", "Exception in %s upgrade with %s: %s", old_configuration_type, upgrade_step.__module__, traceback.format_exc()) return None if upgrade_step_result: this_file_names_without_extension, this_files_data = upgrade_step_result else: # Upgrade failed. Logger.log("w", "Unable to upgrade the file %s with %s.%s. Skipping it.", file_names_without_extension[file_idx], upgrade_step.__module__, upgrade_step.__name__) return None new_file_names_without_extension += this_file_names_without_extension new_files_data += this_files_data file_names_without_extension = new_file_names_without_extension files_data = new_files_data version = new_version configuration_type = new_type return FilesDataUpdateResult(configuration_type = configuration_type, version = version, files_data = files_data, file_names_without_extension = file_names_without_extension)

Example 34

def update_version_files(args): data, version_path = get_config_file(args.version_file) version_data = data["version_data"] if args.version: version_data["version"] = args.version if args.post: version_data["post"] = args.post if args.rc: version_data["rc"] = args.rc if args.codename: version_data["codename"] = args.codename # Write the updated version_data into the file. with open(version_path, "wt") as fd: fd.write(yaml.safe_dump(data, default_flow_style=False)) # Should not happen but just in case... contents = _VERSION_TEMPLATE % ( escape_string(args.version_file), escape_string(json.dumps(version_data, indent=4))) + _VERSION_CODE # Now copy the static version files to all locations. for path in data["dependent_versions"]: current_dir = os.path.abspath(os.path.dirname( os.path.abspath(__file__))) version_path = os.path.abspath(os.path.join(current_dir, path)) if not os.path.relpath(version_path, current_dir): raise TypeError("Dependent version path is outside tree.") with open(version_path, "wt") as fd: fd.write(contents) update_templates(version_data)

Example 35

def update_files(self, left, priorities):
    metainfo = self.torrent.metainfo
    for name, left, total in itertools.izip(metainfo.orig_files, left, metainfo.sizes):
        if total == 0:
            p = 1
        else:
            p = (total - left) / total
        item = self.path_items[name]
        newvalue = "%.1f" % (int(p * 1000) / 10)
        oldvalue = self.GetItemText(item, 2)
        if oldvalue != newvalue:
            self.SetItemText(item, newvalue, 2)
        if name in priorities:
            self.set_priority(item, priorities[name])

Example 36

def update_recent_files(self, path):
    if self._recent_files and path == self._recent_files[0]:
        return
    if self._recent_files is not None:
        if path in self._recent_files:
            self._recent_files.remove(path)
        self._recent_files.insert(0, path)
        self._recent_files = self._recent_files[:self._recent_files_max_count]
        self._update_recent_files_ui()

Example 37

def update_dataset_files_path(self, mode: RunMode):
    dataset_name = "dataset/{}.0.tfrecords".format(mode.value)
    dataset_path = os.path.join(self.project_path, dataset_name)
    dataset_path = dataset_path.replace("\\", '/')
    self.sample_map[DatasetType.TFRecords][mode].delete(0, tk.END)
    self.sample_map[DatasetType.TFRecords][mode].insert(tk.END, dataset_path)
    self.save_conf()

Example 38

def update_item_media_files(self, ark_id, uuid): """ updates media files associated with an item """ old_files = Mediafile.objects.filter(uuid=uuid) for old_file in old_files: media_files = None if self.BASE_MERRITT not in old_file.file_uri: # the file_uri is not in Merritt, so do update # processes if media_files is None: # a file_uri is not in Merritt, so go to # Merritt and get the files for this item media_files = self.get_item_media_files(ark_id, uuid) if isinstance(media_files, list): # we have a list of media files from Merritt # so now check to update if old_file.file_type in self.FILE_TYPE_MAPPINGS: type_pattern = urlquote(self.FILE_TYPE_MAPPINGS[old_file.file_type], safe='') found_file = False for media_file in media_files: if type_pattern in media_file: found_file = media_file break if found_file is not False: if uuid not in self.updated_uuids: self.updated_uuids.append(uuid) self.updated_file_count += 1 old_file.file_uri = found_file old_file.save() output = '\n\n' output += 'Saved file: ' + str(self.updated_file_count) output += ' of uuid: ' + str(len(self.updated_uuids)) output += '\n' output += found_file print(output)

Example 39

def update_train_files(self):
    if not self.file_slots_generated:
        self.file_slots_generated = True
        self.file_slots_btn.setDisabled(True)
        for file in range(self.choose_n_files.value()):
            c = QtWidgets.QLabel("{}:".format(file))
            self.train_files_grid.addWidget(c, file, 0)
            self.f_btn = QtWidgets.QPushButton("Load File", self)
            self.f_btn.clicked.connect(lambda _, fi=file: self.load_train_file(fi))
            self.train_files_grid.addWidget(self.f_btn, file, 1)
            self.f_btns.append(self.f_btn)
            la = QtWidgets.QLabel("Name:".format(file))
            self.train_files_grid.addWidget(la, file, 2)
            id = QtWidgets.QLineEdit(self)
            id.move(20, 20)
            id.resize(500, 40)
            id.setMaxLength(10)
            self.train_files_grid.addWidget(id, file, 3)
            self.classes_name.append(id)

Example 40

def UpdateLinksInBranchedFiles(self):
    for f in self.files_to_branch:
        source_file = os.path.join(self.wiki_dir, f)
        versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)
        print 'Updating links in %s.' % (versioned_file,)
        text = file(source_file, 'r').read()
        new_text = self.search_for_re.sub(self.replace_with, text)
        file(versioned_file, 'w').write(new_text)

Example 41

def update_md_files(md_directory, file_list, snippet_list): """Update md files from snippets. Args: md_directory: Directory to look for md files in. snippet_list: Array of snippets to put into the md files. """ for md_file in file_list: path = find_file(md_file, md_directory) if not path: print >> sys.stderr, 'WARNING: Cannot find %s, skipping.' % md_file continue new_file_handle = tempfile.NamedTemporaryFile(delete=False) temp_file_name = new_file_handle.name write_lines = True try: with open(path, 'r') as mdfile: for line in mdfile: match = MDDOXYSNIPPETSTART_RE.match(line) if match: token = match.groupdict()['token'] new_file_handle.write(line) if snippet_list.has_key(token): write_lines = False for snippet_line in snippet_list[token]: new_file_handle.write(snippet_line) elif DOXYSNIPPETEND_RE.match(line): write_lines = True new_file_handle.write(line) elif write_lines: new_file_handle.write(line) except IOError as e: print >> sys.stderr, ( 'ERROR: Failed to open file %s: %s' % (md_file, e.strerror)) os.remove(path) continue if write_lines is False: print >> sys.stderr, 'WARNING: Count not find end of %s.' % (token) new_file_handle.close() os.remove(path) shutil.move(temp_file_name, path)

Example 42

def update_included_files(new_object: FileObject, old_object: dict) -> list:
    old_fi = old_object['files_included']
    new_fi = new_object.files_included
    old_fi.extend(new_fi)
    old_fi = list(set(old_fi))
    return old_fi

Example 43

def UpdateUsingSpecificFiles(self): """Updates and deploys new app versions based on given config files.""" rpcserver = self._GetRpcServer() all_files = [self.basepath] + self.args has_python25_version = False for yaml_path in all_files: file_name = os.path.basename(yaml_path) self.basepath = os.path.dirname(yaml_path) if not self.basepath: self.basepath = '.' module_yaml = self._ParseAppInfoFromYaml(self.basepath, os.path.splitext(file_name)[0]) if module_yaml.runtime == 'python': has_python25_version = True if module_yaml.vm is True: MigrateGcloudNotice() if not module_yaml.module and file_name != 'app.yaml': ErrorUpdate("Error: 'module' parameter not specified in %s" % yaml_path) continue self.UpdateVersion(rpcserver, self.basepath, module_yaml, file_name) if has_python25_version: MigratePython27Notice()

Example 44

def update_search_files(self):
    # Replace file lists with search results and redraw icons
    del self.file_list[:]
    del self.detailed_file_list[:]
    self.file_list = self.ftpController.get_search_file_list()
    self.detailed_file_list = self.ftpController.get_detailed_search_file_list()
    self.draw_icons()

Example 45

def update_files(self, files, export_patterns, parameters, item): # POST //api.crowdin.com/api/project/{project-identifier}/update-file?key={project-key} url = {'post': 'POST', 'url_par1': '/api/project/', 'url_par2': True, 'url_par3': '/update-file', 'url_par4': True} if item[0] == '/': sources = item[1:] else: sources = item params = {'json': 'json', 'export_patterns[{0}]'.format(sources): export_patterns, 'titles[{0}]'.format(sources): parameters.get('titles'), 'first_line_contains_header': parameters.get('first_line_contains_header'), 'scheme': parameters.get('scheme'), 'update_option': parameters.get('update_option'), 'escape_quotes': parameters.get('escape_quotes', '3')} if self.any_options.branch: params['branch'] = self.any_options.branch additional_parameters = {'file_name': sources, 'action_type': "Updating"} try: with open(files, 'rb') as f: api_files = {'files[{0}]'.format(sources): f} # print files return self.true_connection(url, params, api_files, additional_parameters) except(OSError, IOError) as e: print(e, "\n Skipped")

Example 46

def update_config_files_mru_menu_items(self):
    app_utils.update_mru_menu_items(self.recent_config_files,
                                    self.action_open_recent_files,
                                    self.on_config_file_mru_action_triggered,
                                    self.app_config.app_config_file_name,
                                    self.on_config_file_mru_clear_triggered)

Example 47

def update_po_files(): def get_current_msgids(): pot = polib.pofile("locale/en/LC_MESSAGES/django.po") return {e.msgid for e in pot if not e.fuzzy and not e.obsolete} # get the current set of msgids saved_msgids = get_current_msgids() # re-extract locale files from source code ignore_paths = ("env/*", "fabric/*", "media/*", "sitestatic/*", "static/*", "node_modules/*") ignore_args = " ".join([f'--ignore="{p}"' for p in ignore_paths]) cmd(f"python manage.py makemessages -a -e haml,html,txt,py --no-location --no-wrap {ignore_args}") # get the new set of msgids actual_msgids = get_current_msgids() added_msgids = actual_msgids.difference(saved_msgids) removed_msgids = saved_msgids.difference(actual_msgids) if DEBUG: for mid in added_msgids: print(f" + {repr(mid)}") for mid in removed_msgids: print(f" - {repr(mid)}") # if there are no actual changes to msgids, revert if not added_msgids and not removed_msgids: cmd("git checkout -- locale")

Example 48

def update_version_files(args): data, version_path = get_config_file(args.version_file) version_data = data["version_data"] if args.version: version_data["version"] = args.version if args.post: version_data["post"] = args.post if args.rc: version_data["rc"] = args.rc if args.codename: version_data["codename"] = args.codename # Write the updated version_data into the file. with open(version_path, "wt") as fd: fd.write(yaml.safe_dump(data, default_flow_style=False)) # Should not happen but just in case... contents = _VERSION_TEMPLATE % ( escape_string(args.version_file), escape_string(json.dumps(version_data, indent=4))) + _VERSION_CODE # Now copy the static version files to all locations. for path in data["dependent_versions"]: current_dir = os.path.abspath(os.path.dirname( os.path.abspath(__file__))) version_path = os.path.abspath(os.path.join(current_dir, path)) if not os.path.relpath(version_path, current_dir): raise TypeError("Dependent version path is outside tree.") with open(version_path, "wt") as fd: fd.write(contents) update_templates(version_data)

Example 49

def update_xsl_files(journal_object=None, management_command=False): with codecs.open( os.path.join( settings.BASE_DIR, 'utils/install/xsl_files.json'), encoding='utf-8', ) as json_data: default_data = json.load(json_data) for item in default_data: file_path = os.path.join( settings.BASE_DIR, 'transform/xsl/', item["fields"]["file"]) with open(file_path, 'rb') as f: xsl_file = ContentFile(f.read()) xsl_file.name = item["fields"]["file"] default_dict = { 'file': xsl_file, 'comments': item["fields"].get("commments"), } xsl, created = core_models.XSLFile.objects.get_or_create( label=item["fields"]["label"] or settings.DEFAULT_XSL_FILE_LABEL, defaults=default_dict, ) if management_command: print('Parsed XSL {0}'.format(item['fields'].get('label')))

Example 50

def update_project_files(project_name):
    try:
        project_files_info = get_latest_project_file_info(project_name)
    except ApiException as e:
        print "Update of project files failed!"
        print e
        return
    for file_info in project_files_info["files"]:
        if not os.path.exists(get_project_path(project_name) + file_info["path"]):
            try:
                os.stat(get_project_path(project_name) + os.path.dirname(file_info["path"]))
            except:
                os.makedirs(get_project_path(project_name) + os.path.dirname(file_info["path"]))
            print "Downloading " + file_info["path"] + "..."
            urllib.urlretrieve(file_info["url"], get_project_path(project_name) + file_info["path"])
        elif md5sum(get_project_path(project_name) + file_info["path"]) != file_info["md5sum"]:
            print "Updating " + file_info["path"] + "..."
            os.unlink(get_project_path(project_name) + file_info["path"])
            urllib.urlretrieve(file_info["url"], get_project_path(project_name) + file_info["path"])
        else:
            print "Skipped " + file_info["path"]
    store_current_project_version(project_name, project_files_info["version"])

Example 51

def update_existing_files(self):
    filenames = os.listdir(self.structure_data_dir)
    for filename in filenames:
        if filename.endswith('yaml'):
            file_path = os.sep.join([self.structure_data_dir, filename])
            with open(file_path) as f:
                lines = f.read().splitlines()
            with open(file_path, 'w') as f:
                for line in lines:
                    if line.startswith('g_protein'):
                        line = line.replace('g_protein', 'signaling_protein')
                    if not line.startswith('representative'):
                        f.write(line + '\n')

Example 52

def update_servers_files(update_servers_list):
    # ----------------------------
    from platformcode import platformtools
    progress = platformtools.dialog_progress_bg("Update servers list")
    # ----------------------------
    for index, server in enumerate(update_servers_list):
        percentage = index * 100 / len(update_servers_list)
        # ----------------------------
        data = scrapertools.cache_page(remote_url + server[0] + ".py")
        f = open(os.path.join(local_folder, server[0] + ".py"), 'w')
        f.write(data)
        f.close()
        # ----------------------------
        progress.update(percentage, ' Update server: "' + server[0] + '"', 'MD5: "' + server[1] + '"')
    # ----------------------------
    progress.close()

### Functions

## init

Example 53

def updateLastSentFiles(sentFiles):
    config = {}
    if os.path.isfile(os.path.join(parameters['homepath'], dataDirectory, "previous-results.json")):
        with open(os.path.join(parameters['homepath'], dataDirectory, "previous-results.json"), 'r') as f:
            config = json.load(f)
    config['prev_files'] = list(sentFiles)
    with open(os.path.join(parameters['homepath'], dataDirectory, "previous-results.json"), "w") as f:
        json.dump(config, f)

Example 54

def update_camera_files():
    if not RenderingMakerPanel.init:
        return
    camera_files = glob.glob(op.join(mu.get_user_fol(), 'camera', '*camera*.pkl'))
    if len(camera_files) > 0:
        files_names = [mu.namebase(fname) for fname in camera_files]
        if _addon().is_inflated():
            files_names = [name for name in files_names if 'inf' in name]
            files_names.append('camera')
        else:
            files_names = [name for name in files_names if 'inf' not in name]
        items = [(c, c, '', ind) for ind, c in enumerate(files_names)]
        bpy.types.Scene.camera_files = bpy.props.EnumProperty(
            items=items, description="electrodes sources", update=camera_files_update)
        bpy.context.scene.camera_files = 'camera'

Example 55

def update_version_files(targets, version):
    print("updating version.py files")
    update_files(
        targets,
        version,
        "version.py",
        "__version__ .*",
        '__version__ = "{}"'.format(version),
    )

Example 56

def update_files_list(self): self.tools = Tools() # initialize model for inserting to table model = QtGui.QStandardItemModel(1, 1) model.setHorizontalHeaderLabels(['File name', 'File size', 'Mimetype', 'File ID']) self.current_bucket_index = self.file_manager_ui.bucket_select_combo_box.currentIndex() self.current_selected_bucket_id = self.bucket_id_list[self.current_bucket_index] i = 0 try: for self.file_details in self.storj_engine.storj_client.bucket_files(str(self.current_selected_bucket_id)): item = QtGui.QStandardItem(str(self.file_details['filename'].replace('[DECRYPTED]', ""))) model.setItem(i, 0, item) # row, column, item (StandardItem) file_size_str = self.tools.human_size(int(self.file_details["size"])) # get human readable file size item = QtGui.QStandardItem(str(file_size_str)) model.setItem(i, 1, item) # row, column, item (QQtGui.StandardItem) item = QtGui.QStandardItem(str(self.file_details['mimetype'])) model.setItem(i, 2, item) # row, column, item (QStandardItem) item = QtGui.QStandardItem(str(self.file_details['id'])) model.setItem(i, 3, item) # row, column, item (QStandardItem) i = i + 1 self.__logger.info(self.file_details) except sjexc.StorjBridgeApiError as e: self.__logger.error(e) self.file_manager_ui.files_list_tableview.clearFocus() self.file_manager_ui.files_list_tableview.setModel(model) self.file_manager_ui.files_list_tableview.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)

Example 57

def update_config_files() -> None: root.verify_root_access("Root access needed to write files in " + "'" + __basefilepath__ + "files/" + "'") try: zip_archive = __basefilepath__ + "ovpn.zip" if os.path.exists(zip_archive): print(Fore.BLUE + "Previous update file already exists, deleting..." + Style.RESET_ALL) os.remove(zip_archive) subprocess.check_call( ["sudo", "wget", "//downloads.nordcdn.com/configs/archives/servers/ovpn.zip", "-P", __basefilepath__]) except subprocess.CalledProcessError: logger.error("Exception occurred while wgetting zip, is the internet working? \ is nordcdn.com blocked by your ISP or Country?, If so use Privoxy \ [//github.com/jotyGill/openpyn-nordvpn/issues/109]") sys.exit() try: subprocess.check_call( ["sudo", "unzip", "-q", "-u", "-o", __basefilepath__ + "ovpn", "-d", __basefilepath__ + "files/"], stderr=subprocess.DEVNULL) subprocess.check_call( ["sudo", "rm", __basefilepath__ + "ovpn.zip"]) except subprocess.CalledProcessError: try: subprocess.check_call( ["sudo", "rm", "-rf", __basefilepath__ + "files/ovpn_udp"]) subprocess.check_call( ["sudo", "rm", "-rf", __basefilepath__ + "files/ovpn_tcp"]) subprocess.check_call( ["sudo", "unzip", __basefilepath__ + "ovpn", "-d", __basefilepath__ + "files/"]) subprocess.check_call( ["sudo", "rm", __basefilepath__ + "ovpn.zip"]) except subprocess.CalledProcessError: logger.error("Exception occured while unzipping ovpn.zip, is unzip installed?") sys.exit() # Lists information about servers under the given criteria.

Example 58

def update_files(self, source='docker'):
    sourcetree = os.path.join(self.basepath, source)
    if os.path.exists(sourcetree):
        shutil.rmtree(sourcetree)
    if os.path.exists(self.basepath):
        shutil.copytree(self.get_sys_data_dirs(source), sourcetree)

Example 59

def update_data_files_version():
    install_file_folder = os.path.join(resources.xdg_user_data_folder, "rafcon")
    install_file_path = os.path.join(install_file_folder, "installed")
    if not os.path.isdir(install_file_folder):
        os.mkdir(install_file_folder)
    with open(install_file_path, "w") as file_pointer:
        file_pointer.write(rafcon.__version__)

Example 60

def update_recent_files_list(self, new_file=None): "Load and update the recent files list and menus" rf_list = [] if os.path.exists(self.recent_files_path): rf_list_file = open(self.recent_files_path,'r') try: rf_list = rf_list_file.readlines() finally: rf_list_file.close() if new_file: new_file = os.path.abspath(new_file) + '\n' if new_file in rf_list: rf_list.remove(new_file) # move to top rf_list.insert(0, new_file) # clean and save the recent files list bad_paths = [] for path in rf_list: if '\0' in path or not os.path.exists(path[0:-1]): bad_paths.append(path) rf_list = [path for path in rf_list if path not in bad_paths] ulchars = "1234567890ABCDEFGHIJK" rf_list = rf_list[0:len(ulchars)] try: with open(self.recent_files_path, 'w') as rf_file: rf_file.writelines(rf_list) except IOError as err: if not getattr(self.root, "recentfilelist_error_displayed", False): self.root.recentfilelist_error_displayed = True tkMessageBox.showerror(title='IDLE Error', message='Unable to update Recent Files list:\n%s' % str(err), parent=self.text) # for each edit window instance, construct the recent files menu for instance in self.top.instance_dict.keys(): menu = instance.recent_files_menu menu.delete(0, END) # clear, and rebuild: for i, file_name in enumerate(rf_list): file_name = file_name.rstrip() # zap \n # make unicode string to display non-ASCII chars correctly ufile_name = self._filename_to_unicode(file_name) callback = instance.__recent_file_callback(file_name) menu.add_command(label=ulchars[i] + " " + ufile_name, command=callback, underline=0)

Can I overwrite a file in Python?

Example 1: Using the open() function to overwrite a file. To overwrite a file with new content, open it in “w” mode, the write mode. This deletes the existing content of the file first; then we can write the new content and save it.
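A minimal sketch of that approach (the file name notes.txt is only an illustration; any path works):

# Open in "w" (write) mode: any existing content is truncated before the new text is written.
with open("notes.txt", "w", encoding="utf-8") as f:
    f.write("This text replaces whatever the file contained before.\n")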

How do you replace the contents of a file in Python?

Method 1: Removing all the text and writing the new text into the same file.
Method 2: Using replace() in a for loop.
Method 3: Using the os module to replace the file with new text.
Method 4: Using fileinput.input(). A sketch of Methods 1 and 4 follows below.
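As a rough sketch of Method 1 and Method 4, assuming a file called data.txt and a simple "old" to "new" substitution (both the file name and the strings are placeholders):

import fileinput

# Method 1: read the whole file, replace in memory, then write it back.
with open("data.txt", "r", encoding="utf-8") as f:
    text = f.read()
with open("data.txt", "w", encoding="utf-8") as f:
    f.write(text.replace("old", "new"))

# Method 4: fileinput.input() with inplace=True redirects print() output back into the file.
for line in fileinput.input("data.txt", inplace=True):
    print(line.replace("old", "new"), end="")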

How do I create and edit a text file in Python?

To create a text file in Python, pass one of these modes to open():
'w' – open a file for writing. If the file doesn't exist, open() creates a new file; otherwise it overwrites the contents of the existing file.
'x' – open a file for exclusive creation. If the file already exists, open() raises a FileExistsError.
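A short sketch contrasting the two modes, assuming new_file.txt does not exist yet (the name is a placeholder):

# 'w' creates the file if it is missing, or truncates it if it already exists.
with open("new_file.txt", "w", encoding="utf-8") as f:
    f.write("first line\n")

# 'x' only succeeds when the file does not exist; otherwise open() raises FileExistsError.
try:
    with open("new_file.txt", "x", encoding="utf-8") as f:
        f.write("created exclusively\n")
except FileExistsError:
    print("new_file.txt already exists")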
