--- a/scripts/teamforge-import.py
+++ b/scripts/teamforge-import.py
@@ -4,10 +4,11 @@
 import re
 import os
 import os.path
-
-from urllib2 import urlopen
+from time import mktime
+import json
+
 from urlparse import urlparse
-from urllib import urlretrieve
+from urllib import FancyURLopener
 
 from suds.client import Client
 from suds import WebFault
@@ -32,7 +33,7 @@
     global options, s
     optparser = OptionParser(usage='''%prog [--options] [projID projID projID]\nIf no project ids are given, all projects will be migrated''')
     optparser.add_option('--api-url', dest='api_url', help='e.g. https://hostname/ce-soap50/services/')
-    optparser.add_option('--attachment-url', dest='attachment_url', default='/sf/wiki/do/viewAttachment/')
+    optparser.add_option('--attachment-url', dest='attachment_url', default='/sf/%s/do/%s/')
     optparser.add_option('--default-wiki-text', dest='default_wiki_text', default='PRODUCT NAME HERE', help='used in determining if a wiki page text is default or changed')
     optparser.add_option('-u', '--username', dest='username')
     optparser.add_option('-p', '--password', dest='password')
@@ -65,12 +66,14 @@
         os.makedirs(options.output_dir)
     for pid in project_ids:
         project = c.service.getProjectData(s, pid)
+        project.shortname = project.path.split('.')[-1]
         log.info('Project: %s %s %s' % (project.id, project.title, project.path))
 
         out_dir = os.path.join(options.output_dir, project.id)
         if not os.path.exists(out_dir):
             os.mkdir(out_dir)
 
+        get_files(project)
         get_homepage_wiki(project)
         check_unsupported_tools(project)
 
@@ -106,16 +109,30 @@
     with open(out_file, 'w') as out:
         out.write(content)
 
-def download_attachment(url_path, *filepaths):
+class StatusCheckingURLopener(FancyURLopener):
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+        raise Exception(errcode)
+statusCheckingURLopener = StatusCheckingURLopener()
+
+def download_file(tool, url_path, *filepaths):
+    if tool == 'wiki':
+        action = 'viewAttachment'
+    elif tool == 'frs':
+        action = 'downloadFile'
+    else:
+        raise ValueError('tool %s not supported' % tool)
+    action_url = options.attachment_url % (tool, action)
+
     out_file = os.path.join(options.output_dir, *filepaths)
     if not os.path.exists(os.path.dirname(out_file)):
         os.makedirs(os.path.dirname(out_file))
 
     hostname = urlparse(options.api_url).hostname
     scheme = urlparse(options.api_url).scheme
-    url = scheme + '://' + hostname + options.attachment_url + url_path
+    url = scheme + '://' + hostname + action_url + url_path
     log.debug('fetching %s' % url)
-    urlretrieve(url, out_file)
+    statusCheckingURLopener.retrieve(url, out_file)
+    return out_file
 
 bracket_macro = re.compile(r'\[(.*?)\]')
 h1 = re.compile(r'^!!!', re.MULTILINE)
@@ -163,10 +180,12 @@
     wiki_pages = wiki.service.getWikiPageList(s, project.id)
     for wiki_page in wiki_pages.dataRows:
         wiki_page = wiki.service.getWikiPageData(s, wiki_page.id)
+        pagename = wiki_page.path.split('/')[-1]
+        save(json.dumps(dict(wiki_page), default=str), project, 'wiki', pagename+'.json')
         if not wiki_page.wikiText:
             log.debug('skip blank wiki page %s' % wiki_page.path)
             continue
-        pages[wiki_page.path.split('/')[-1]] = wiki_page.wikiText
+        pages[pagename] = wiki_page.wikiText
 
     # PageApp does not provide a useful way to determine the Project Home special wiki page
     # so use some heuristics
@@ -186,13 +205,62 @@
         save(homepage, project, 'wiki', 'homepage.markdown')
         for img_ref in find_image_references(homepage):
             filename = img_ref.split('/')[-1]
-            download_attachment(project.path + '/wiki/' + img_ref, project.id, 'wiki', 'homepage', filename)
+            download_file('wiki', project.path + '/wiki/' + img_ref, project.id, 'homepage', filename)
 
     for path, text in pages.iteritems():
         if options.default_wiki_text in text:
             log.debug('skipping default wiki page %s' % path)
         else:
             save(text, project, 'wiki', path+'.markdown')
+
+def get_files(project):
+    frs = make_client(options.api_url, 'FrsApp')
+    valid_pfs_filename = re.compile(r'(?![. ])[-_ +.,=#~@!()\[\]a-zA-Z0-9]+(?<! )$')
+    pfs_output_dir = os.path.join(os.path.abspath(options.output_dir), 'PFS', project.shortname)
+
+    def handle_path(obj, prev_path):
+        path_component = obj.title.strip().replace('/', ' ').replace('&','').replace(':','')
+        path = os.path.join(prev_path, path_component)
+        if not valid_pfs_filename.match(path_component):
+            log.error('Invalid filename: "%s"' % path)
+        save(json.dumps(dict(obj), default=str),
+            project, 'frs', path+'.json')
+        return path
+
+    for pkg in frs.service.getPackageList(s, project.id).dataRows:
+        pkg_path = handle_path(pkg, '')
+
+        for rel in frs.service.getReleaseList(s, pkg.id).dataRows:
+            rel_path = handle_path(rel, pkg_path)
+
+            for file in frs.service.getFrsFileList(s, rel.id).dataRows:
+                details = frs.service.getFrsFileData(s, file.id)
+
+                file_path = os.path.join(rel_path, file.title.strip())
+                save(json.dumps(dict(file,
+                                     lastModifiedBy=details.lastModifiedBy,
+                                     lastModifiedDate=details.lastModifiedDate,
+                                     ),
+                                default=str),
+                     project,
+                     'frs',
+                     file_path+'.json'
+                     )
+                # fetch the file itself, then preserve its server-side mtime:
+                download_file('frs', rel.path + '/' + file.id, pfs_output_dir, file_path)
+                # TODO: createdOn
+                mtime = int(mktime(details.lastModifiedDate.timetuple()))
+                os.utime(os.path.join(pfs_output_dir, file_path), (mtime, mtime))
+
+                # now set mtime on the way back up the tree (so it isn't clobbered):
+
+            # TODO: createdOn
+            mtime = int(mktime(rel.lastModifiedOn.timetuple()))
+            os.utime(os.path.join(pfs_output_dir, rel_path), (mtime, mtime))
+        # TODO: createdOn
+        mtime = int(mktime(pkg.lastModifiedOn.timetuple()))
+        os.utime(os.path.join(pfs_output_dir, pkg_path), (mtime, mtime))
+
 
 '''
 print c.service.getProjectData(s, p.id)
@@ -221,9 +289,6 @@
     main()
 
 
-from mock import patch
-from nose.tools import assert_equal
-
 def test_convert_markup():
 
     markup = '''