diff --git a/web/especies.pas b/web/especies.pas index 7157f2f..68dc72a 100644 --- a/web/especies.pas +++ b/web/especies.pas @@ -71,6 +71,7 @@ TWikiSearch = class(TObject) { Search Wikipedia (http://en.wikipedia.org) arti public WIKIPEDIA_URL: string; WIKIMEDIA_URL: string; + WIKIPEDIA_REDIRECT_URL: string; candidates: TStringList; constructor Create; destructor Destroy; override; @@ -111,335 +112,341 @@ TPubMedSearch = class(TObject) { Search PubMed } FFSearch: TFFSearch; PubMedSearch: TPubMedSearch; - { TGBIFSearch methods } +{ TGBIFSearch methods } - constructor TGBIFSearch.Create; - begin - GBIF_URL := 'http://api.gbif.org/v1'; - end; +constructor TGBIFSearch.Create; +begin + GBIF_URL := 'http://api.gbif.org/v1'; +end; - procedure TGBIFSearch.Search(const searchStr: string; var key: integer; +procedure TGBIFSearch.Search(const searchStr: string; var key: integer; var scientificname, authorship, status, valid_name, kingdom, phylum, - classe, order, family: string); - var - JsonData: TJsonData; - begin + classe, order, family: string); +var + JsonData: TJsonData; + Client: TFPHttpClient; +begin + try + Client := TFPHttpClient.Create(nil); try - try - JsonData := GetJson(TFPHTTPClient.SimpleGet(GBIF_URL + - '/species/?name=' + StringReplace(searchStr, ' ', '%20', [rfReplaceAll]))); - key := JsonData.FindPath('results[0].key').AsInteger; - scientificname := JsonData.FindPath('results[0].canonicalName').AsString; - authorship := JsonData.FindPath('results[0].authorship').AsString; - status := JsonData.FindPath('results[0].taxonomicStatus').AsString; - status := LowerCase(StringReplace(status, '_', ' ', [rfReplaceAll])); - if status <> 'accepted' then - valid_name := JsonData.FindPath('results[0].species').AsString; - kingdom := JsonData.FindPath('results[0].kingdom').AsString; - phylum := JsonData.FindPath('results[0].phylum').AsString; - classe := JsonData.FindPath('results[0].class').AsString; - order := JsonData.FindPath('results[0].order').AsString; - family := 
JsonData.Findpath('results[0].family').AsString; - except - {on E: Exception do - WriteLn('

Error fetching classification data from CoL: ', - E.ClassName, #13#10, E.Message, '

');} - key := 0; - scientificname := ''; - authorship := ''; - status := ''; - valid_name := ''; - kingdom := ''; - phylum := ''; - classe := ''; - order := ''; - family := ''; - end; - finally - JsonData.Free; + JsonData := GetJson(Client.Get(GBIF_URL + '/species/?name=' + + StringReplace(searchStr, ' ', '%20', [rfReplaceAll]))); + key := JsonData.FindPath('results[0].key').AsInteger; + scientificname := JsonData.FindPath('results[0].canonicalName').AsString; + authorship := JsonData.FindPath('results[0].authorship').AsString; + status := JsonData.FindPath('results[0].taxonomicStatus').AsString; + status := LowerCase(StringReplace(status, '_', ' ', [rfReplaceAll])); + if status <> 'accepted' then + valid_name := JsonData.FindPath('results[0].species').AsString; + kingdom := JsonData.FindPath('results[0].kingdom').AsString; + phylum := JsonData.FindPath('results[0].phylum').AsString; + classe := JsonData.FindPath('results[0].class').AsString; + order := JsonData.FindPath('results[0].order').AsString; + family := JsonData.Findpath('results[0].family').AsString; + except + key := 0; + scientificname := ''; + authorship := ''; + status := ''; + valid_name := ''; + kingdom := ''; + phylum := ''; + classe := ''; + order := ''; + family := ''; end; + finally + JsonData.Free; + Client.Free; end; +end; - function TGBIFSearch.Count(key: integer): integer; - var - JsonData: TJsonData; - nrecs: integer; - begin +function TGBIFSearch.Count(key: integer): integer; +var + JsonData: TJsonData; + Client: TFPHttpClient; + nrecs: integer; +begin + try + Client := TFPHttpClient.Create(nil); try - try - JsonData := GetJson(TFPHTTPClient.SimpleGet(GBIF_URL + - '/occurrence/search?taxonKey=' + IntToStr(key))); - nrecs := JsonData.FindPath('count').AsInteger; - Result := nrecs; - except - {on E: Exception do - WriteLn('

Error fetching number of records from GBIF: ', - E.ClassName, #13#10, E.Message, '

');} - Result := 0; - end; - finally - JsonData.Free; + JsonData := GetJson(Client.Get(GBIF_URL + '/occurrence/search?taxonKey=' + + IntToStr(key))); + nrecs := JsonData.FindPath('count').AsInteger; + Result := nrecs; + except + Result := 0; end; + finally + JsonData.Free; + Client.Free; end; +end; - { TNCBISearch methods } +{ TNCBISearch methods } - constructor TNCBISearch.Create; - begin - NCBI_URL := 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/'; - results := TStringList.Create; - results.NameValueSeparator := '|'; - end; +constructor TNCBISearch.Create; +begin + NCBI_URL := 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/'; + results := TStringList.Create; + results.NameValueSeparator := '|'; +end; - destructor TNCBISearch.Destroy; - begin - if FileExists('temp.xml') then - DeleteFile('temp.xml'); - results.Free; - inherited Destroy; - end; +destructor TNCBISearch.Destroy; +begin + results.Free; + inherited Destroy; +end; - procedure TNCBISearch.Summary(const searchStr: string; var id: integer; +procedure TNCBISearch.Summary(const searchStr: string; var id: integer; var division, scientificname, commonname: string; var nucNum, protNum: integer); - var - XmlData: ansistring; - Doc: TXMLDocument; - outfile: TextFile; - Result: TXPathVariable; - begin +var + XmlData: ansistring; + Doc: TXMLDocument; + Result: TXPathVariable; + Client: TFPHttpClient; + MemStrm: TMemoryStream; +begin + try + Client := TFPHttpClient.Create(nil); try - try - XmlData := TFPHTTPClient.SimpleGet(NCBI_URL + 'esearch.fcgi?db=taxonomy&term=' + - StringReplace(searchStr, ' ', '+', [rfReplaceAll])); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - - { Get taxon id } - Result := EvaluateXPathExpression('/eSearchResult/IdList/Id', - Doc.DocumentElement); - id := StrToInt(string(Result.AsText)); - - XmlData := TFPHTTPClient.SimpleGet(NCBI_URL + 'esummary.fcgi?db=taxonomy&id=' + - IntToStr(Id) + 
'&retmode=xml'); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - - { Get summary data } - Result := EvaluateXPathExpression( - '/eSummaryResult/DocSum/Item[@Name="Division"]', Doc.DocumentElement); - division := string(Result.AsText); - - Result := EvaluateXPathExpression( - '/eSummaryResult/DocSum/Item[@Name="ScientificName"]', Doc.DocumentElement); - scientificname := string(Result.AsText); - - Result := EvaluateXPathExpression( - '/eSummaryResult/DocSum/Item[@Name="CommonName"]', Doc.DocumentElement); - commonname := string(Result.AsText); - - { Get nucleotide sequences } - XmlData := TFPHTTPClient.SimpleGet(NCBI_URL + - 'esearch.fcgi?db=nucleotide&term=' + - StringReplace(searchStr, ' ', '+', [rfReplaceAll])); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - nucNum := StrToInt(string(EvaluateXPathExpression('/eSearchResult/Count', - Doc.DocumentElement).AsText)); - - { Get protein sequences } - XmlData := TFPHTTPClient.SimpleGet(NCBI_URL + 'esearch.fcgi?db=protein&term=' + - StringReplace(searchStr, ' ', '+', [rfReplaceAll])); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - protNum := StrToInt(string(EvaluateXPathExpression('/eSearchResult/Count', - Doc.DocumentElement).AsText)); - except - {on E: Exception do - WriteLn('

Error fetching biomolecular data from NCBI: ', - E.ClassName, #13#10, E.Message, '

');} - id := 0; - division := ''; - scientificName := ''; - nucNum := 0; - protNum := 0; - end; - finally - Result.Free; - Doc.Free; + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(NCBI_URL + 'esearch.fcgi?db=taxonomy&term=' + + StringReplace(searchStr, ' ', '+', [rfReplaceAll])); + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + + { Get taxon id } + Result := EvaluateXPathExpression('/eSearchResult/IdList/Id', + Doc.DocumentElement); + id := StrToInt(string(Result.AsText)); + + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(NCBI_URL + 'esummary.fcgi?db=taxonomy&id=' + + IntToStr(Id) + '&retmode=xml'); + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + + { Get summary data } + Result := EvaluateXPathExpression( + '/eSummaryResult/DocSum/Item[@Name="Division"]', Doc.DocumentElement); + division := string(Result.AsText); + + Result := EvaluateXPathExpression( + '/eSummaryResult/DocSum/Item[@Name="ScientificName"]', Doc.DocumentElement); + scientificname := string(Result.AsText); + + Result := EvaluateXPathExpression( + '/eSummaryResult/DocSum/Item[@Name="CommonName"]', Doc.DocumentElement); + commonname := string(Result.AsText); + + { Get nucleotide sequences } + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(NCBI_URL + 'esearch.fcgi?db=nucleotide&term=' + + StringReplace(searchStr, ' ', '+', [rfReplaceAll])); + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + nucNum := StrToInt(string(EvaluateXPathExpression('/eSearchResult/Count', + Doc.DocumentElement).AsText)); + + { Get protein sequences } + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(NCBI_URL + 'esearch.fcgi?db=protein&term=' + + StringReplace(searchStr, ' ', '+', [rfReplaceAll])); 
+ if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + protNum := StrToInt(string(EvaluateXPathExpression('/eSearchResult/Count', + Doc.DocumentElement).AsText)); + except + id := 0; + division := ''; + scientificName := ''; + nucNum := 0; + protNum := 0; end; + finally + Result.Free; + Doc.Free; + Client.Free; end; +end; - function TNCBISearch.Links(id: integer): TStringList; - var - XmlData: ansistring; - Doc: TXMLDocument; - outfile: TextFile; - Result1, Result2: TXPathVariable; - NodeSet1, NodeSet2: TNodeSet; - i: integer; - begin - { Get list of links } +function TNCBISearch.Links(id: integer): TStringList; +var + XmlData: ansistring; + Doc: TXMLDocument; + Result1, Result2: TXPathVariable; + NodeSet1, NodeSet2: TNodeSet; + i: integer; + Client: TFPHttpClient; + MemStrm: TMemoryStream; +begin + { Get list of links } + try + Client := TFPHttpClient.Create(nil); try - try - XmlData := TFPHTTPClient.SimpleGet(NCBI_URL + 'elink.fcgi?dbfrom=taxonomy&id=' + - IntToStr(id) + '&cmd=llinkslib'); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - Result1 := EvaluateXPathExpression('//ObjUrl/Url', Doc.DocumentElement); - Result2 := EvaluateXPathExpression('//ObjUrl/Provider/Name', - Doc.DocumentElement); - NodeSet1 := Result1.AsNodeSet; - NodeSet2 := Result2.AsNodeSet; - if NodeSet1.Count > 0 then - for i := 0 to NodeSet1.Count - 1 do - results.Add(string(TDomElement(NodeSet1.Items[i]).TextContent) + - '|' + string(TDomElement(NodeSet2.Items[i]).TextContent)); - Result := results; - except - {on E: Exception do - WriteLn('

Error fetching links from NCBI: ', - E.ClassName, #13#10, E.Message, '

');} - Result := nil; - end; - finally - Result1.Free; - Result2.Free; + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(NCBI_URL + 'elink.fcgi?dbfrom=taxonomy&id=' + + IntToStr(id) + '&cmd=llinkslib'); + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + Result1 := EvaluateXPathExpression('//ObjUrl/Url', Doc.DocumentElement); + Result2 := EvaluateXPathExpression('//ObjUrl/Provider/Name', + Doc.DocumentElement); + NodeSet1 := Result1.AsNodeSet; + NodeSet2 := Result2.AsNodeSet; + if NodeSet1.Count > 0 then + for i := 0 to NodeSet1.Count - 1 do + results.Add(string(TDomElement(NodeSet1.Items[i]).TextContent) + + '|' + string(TDomElement(NodeSet2.Items[i]).TextContent)); + Result := results; + except + Result := nil; end; + finally + Result1.Free; + Result2.Free; + Client.Free; end; +end; - { TWikiSearch methods } +{ TWikiSearch methods } - constructor TWikiSearch.Create; - begin - WIKIPEDIA_URL := 'https://en.wikipedia.org/api/rest_v1/page/summary/'; - WIKIMEDIA_URL := 'https://en.wikipedia.org/api/rest_v1/page/media-list/'; - {WIKIPEDIA_REDIRECT_URL := 'https://en.wikipedia.org/w/api.php?action=query&titles=';} - candidates := TStringList.Create; - end; +constructor TWikiSearch.Create; +begin + WIKIPEDIA_URL := 'https://en.wikipedia.org/api/rest_v1/page/summary/'; + WIKIMEDIA_URL := 'https://en.wikipedia.org/api/rest_v1/page/media-list/'; + WIKIPEDIA_REDIRECT_URL := 'https://en.wikipedia.org/w/api.php?action=query&titles='; + candidates := TStringList.Create; +end; - destructor TWikiSearch.Destroy; - begin - candidates.Free; - inherited Destroy; - end; +destructor TWikiSearch.Destroy; +begin + candidates.Free; + inherited Destroy; +end; - function TWikiSearch.Snippet(const searchStr: string): string; - var - JsonData: TJsonData; - Client: TFPHttpClient; - begin - Client := TFPHttpClient.Create(nil); +(*function TWikiSearch.Snippet(const searchStr: string): string; 
+var + JsonData: TJsonData; + Client: TFPHttpClient; +begin + Client := TFPHttpClient.Create(nil); + try try - try - { Allow redirections } - Client.AllowRedirect := True; - JsonData := GetJson(Client.Get(WIKIPEDIA_URL + - StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); - Result := JsonData.FindPath('extract').AsString; - except - {on E: Exception do - WriteLn('

Error fetching text snippet from Wikipedia: ', - E.ClassName, #13#10, E.Message, '

');} - Result := ''; - end; - finally - JsonData.Free; - Client.Free; + { Allow redirections } + Client.AllowRedirect := True; + JsonData := GetJson(Client.Get(WIKIPEDIA_URL + + StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); + Result := JsonData.FindPath('extract').AsUnicodeString; + except + Result := ''; end; + finally + JsonData.Free; + Client.Free; end; - -(* function TWikiSearch.Snippet(const searchStr: string): string; +end;*) + +function TWikiSearch.Snippet(const searchStr: string): string; var JsonData: TJsonData; + queryStr: string; + Client: TFPHttpClient; begin try + Client := TFPHttpClient.Create(nil); try { Allow redirections } - JsonData := GetJSON(TFPHTTPClient.SimpleGet(WIKIPEDIA_REDIRECT_URL + - StringReplace(queryStr, ' ', '+', [rfReplaceAll]) + '&redirects&format=json')); + JsonData := GetJSON(Client.Get(WIKIPEDIA_REDIRECT_URL + + StringReplace(searchStr, ' ', '+', [rfReplaceAll]) + '&redirects&format=json')); if JsonData.FindPath('query.redirects[0].to') <> nil then - queryStr := JsonData.FindPath('query.redirects[0].to').AsString; - JsonData := GetJson(TFPHTTPClient.SimpleGet(WIKIPEDIA_URL + - StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); - Result := JsonData.FindPath('extract').AsString; + queryStr := JsonData.FindPath('query.redirects[0].to').AsString + else + queryStr := searchStr; + JsonData := GetJson(Client.Get(WIKIPEDIA_URL + + StringReplace(queryStr, ' ', '_', [rfReplaceAll]))); + Result := JsonData.FindPath('extract').AsUnicodeString; except Result := ''; end; finally JsonData.Free; + Client.Free; end; -end; *) +end; - { Search images from Wikimedia Commons } - function TWikiSearch.Images(const searchStr: string; limit: integer = 10): TStringList; - var - JsonData, JsonItem, JsonItems: TJsonData; - Client: TFPHttpClient; - i, Count: integer; - ext: string; - begin +(*{ Search images from Wikimedia Commons } +function TWikiSearch.Images(const searchStr: string; limit: integer = 10): TStringList; +var + JsonData, JsonItem, 
JsonItems: TJsonData; + Client: TFPHttpClient; + i, Count: integer; + ext: string; +begin + try try - try - Client := TFPHttpClient.Create(nil); - Client.AllowRedirect := True; - JsonData := GetJson(Client.Get(WIKIMEDIA_URL + - StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); - JsonItems := JsonData.FindPath('items'); - Count := 0; - for i := 0 to JsonItems.Count - 1 do + Client := TFPHttpClient.Create(nil); + Client.AllowRedirect := True; + JsonData := GetJson(Client.Get(WIKIMEDIA_URL + + StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); + JsonItems := JsonData.FindPath('items'); + Count := 0; + for i := 0 to JsonItems.Count - 1 do + begin + JsonItem := JsonItems.Items[i]; + ext := ExtractFileExt(JsonItem.FindPath('title').AsString); + if (ext = '.jpg') then begin - JsonItem := JsonItems.Items[i]; - ext := ExtractFileExt(JsonItem.FindPath('title').AsString); - if (ext = '.jpg') then - begin - candidates.Add(JsonItem.FindPath('title').AsString); - Inc(Count); - if Count >= limit then - break; - end; + candidates.Add(JsonItem.FindPath('title').AsString); + Inc(Count); + if Count >= limit then + break; end; - Result := candidates; - except - {on E: Exception do - WriteLn('

Error fetching images from Wikimedia: ', - E.ClassName, #13#10, E.Message, '

');} - candidates := nil; end; - finally - JsonData.Free; - Client.Free; + Result := candidates; + except + candidates := nil; end; + finally + JsonData.Free; + Client.Free; end; - -(*function TWikiSearch.Images(const searchStr: string; limit: integer = 10): TStringList; +end; *) + +function TWikiSearch.Images(const searchStr: string; limit: integer = 10): TStringList; var JsonData, JsonItem, JsonItems: TJsonData; i, Count: integer; - ext: string; + queryStr, ext: string; + Client: TFPHttpClient; begin try + Client := TFPHttpClient.Create(nil); try - JsonData := GetJSON(TFPHTTPClient.SimpleGet(WIKIPEDIA_REDIRECT_URL + - StringReplace(queryStr, ' ', '+', [rfReplaceAll]) + '&redirects&format=json')); + JsonData := GetJSON(Client.Get(WIKIPEDIA_REDIRECT_URL + + StringReplace(searchStr, ' ', '+', [rfReplaceAll]) + '&redirects&format=json')); if JsonData.FindPath('query.redirects[0].to') <> nil then - queryStr := JsonData.FindPath('query.redirects[0].to').AsString; - JsonData := GetJson(TFHTTPClient.SimpleGet(WIKIMEDIA_URL + - StringReplace(searchStr, ' ', '_', [rfReplaceAll]))); + queryStr := JsonData.FindPath('query.redirects[0].to').AsString + else + queryStr := searchStr; + JsonData := GetJson(Client.Get(WIKIMEDIA_URL + + StringReplace(queryStr, ' ', '_', [rfReplaceAll]))); JsonItems := JsonData.FindPath('items'); Count := 0; for i := 0 to JsonItems.Count - 1 do @@ -460,139 +467,138 @@ TPubMedSearch = class(TObject) { Search PubMed } end; finally JsonData.Free; + Client.Free; end; -end;*) +end; - { TFFSearch methods } +{ TFFSearch methods } - constructor TFFSearch.Create; - begin - FF_URL := 'http://termextract.fivefilters.org/'; - Lines := TStringList.Create; - end; +constructor TFFSearch.Create; +begin + FF_URL := 'http://termextract.fivefilters.org/'; + Lines := TStringList.Create; +end; - destructor TFFSearch.Destroy; - begin - if FileExists('temp.txt') then - DeleteFile('temp.txt'); - Lines.Free; - inherited Destroy; - end; +destructor TFFSearch.Destroy; +begin 
+ Lines.Free; + inherited Destroy; +end; - { Provides a list of significant words or phrases extracted from a larger content from FiveFilters Web service } +{ Provides a list of significant words or phrases extracted from a larger content from FiveFilters Web service } - function TFFSearch.termExtract(const contextStr: string; - limit: integer = 10): TStringList; - var - XmlData: ansistring; - outfile: TextFile; - begin +function TFFSearch.termExtract(const contextStr: string; + limit: integer = 10): TStringList; +var + TextData: ansistring; + Client: TFPHttpClient; + MemStrm: TMemoryStream; +begin + try + Client := TFPHttpClient.Create(nil); try - try - XmlData := TFPHTTPClient.SimpleGet(FF_URL + 'extract.php?text=' + - StringReplace(contextStr, ' ', '+', [rfReplaceAll]) + - '&output=txt&max=' + IntToStr(limit)); - AssignFile(outfile, 'temp.txt'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - Lines.LoadFromFile('temp.txt'); - Result := Lines; - except - {on E: Exception do - WriteLn('

Error fetching keywords from FiveFilters: ', - E.ClassName, #13#10, E.Message, '

');} - Result := nil; - end; - finally + MemStrm := TMemoryStream.Create; + TextData := Client.Get(FF_URL + 'extract.php?text=' + + StringReplace(contextStr, ' ', '+', [rfReplaceAll]) + + '&output=txt&max=' + IntToStr(limit)); + Lines.Text := StringReplace(TextData, '\n', LineEnding, + [rfReplaceAll, rfIgnoreCase]); + Result := Lines; + except + Result := nil; end; + finally + Client.Free; + MemStrm.Free; end; +end; - { TPubMedSearch methods } +{ TPubMedSearch methods } - constructor TPubMedSearch.Create; - begin - PUBMED_URL := 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/'; - references := TStringList.Create; - end; +constructor TPubMedSearch.Create; +begin + PUBMED_URL := 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/'; + references := TStringList.Create; +end; - destructor TPubMedSearch.Destroy; - begin - if FileExists('temp.xml') then - DeleteFile('temp.xml'); - references.Free; - inherited Destroy; - end; +destructor TPubMedSearch.Destroy; +begin + references.Free; + inherited Destroy; +end; - function TPubMedSearch.Search(const searchStr: string; - limit: integer = 10): TStringList; - var - XmlData: ansistring; - Doc: TXMLDocument; - outfile: TextFile; - Result1, Result2: TXPathVariable; - NodeSet1, NodeSet2, Ids: TNodeSet; - id: string; - i: integer; - begin +function TPubMedSearch.Search(const searchStr: string; + limit: integer = 10): TStringList; +var + XmlData: ansistring; + Doc: TXMLDocument; + Result1, Result2: TXPathVariable; + NodeSet1, NodeSet2, Ids: TNodeSet; + id: string; + i: integer; + Client: TFPHttpClient; + MemStrm: TMemoryStream; +begin + try + Client := TFPHttpClient.Create(nil); try - try - XmlData := TFPHTTPClient.SimpleGet(PUBMED_URL + - 'esearch.fcgi?db=pubmed&retmax=' + IntToStr(limit) + - '&sort=relevance&term=' + StringReplace(searchStr, ' ', '+', [rfReplaceAll])); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - - { Get reference ids } - 
Result1 := EvaluateXPathExpression('/eSearchResult/IdList/Id', - Doc.DocumentElement); - Ids := Result1.AsNodeSet; - id := ''; - if Ids.Count > 0 then - for i := 0 to Ids.Count - 1 do - id := id + string(TDomElement(Ids.Items[i]).TextContent) + ','; - - XmlData := TFPHTTPClient.SimpleGet(PUBMED_URL + 'efetch.fcgi?db=pubmed&id=' + - id + '&retmode=xml'); - AssignFile(outfile, 'temp.xml'); - Rewrite(outfile); - WriteLn(outfile, XmlData); - CloseFile(outfile); - ReadXMLFile(Doc, 'temp.xml'); - - { Get list of references } - Result1 := EvaluateXPathExpression('//Article/ArticleTitle', - Doc.DocumentElement); - Result2 := EvaluateXPathExpression( - '//PubmedData/ArticleIdList/ArticleId[@IdType="doi"]', Doc.DocumentElement); - NodeSet1 := Result1.AsNodeSet; - NodeSet2 := Result2.AsNodeSet; - if NodeSet1.Count > 0 then - begin - for i := 0 to NodeSet1.Count - 1 do - try - references.Add(string(TDomElement(NodeSet1.Items[i]).TextContent) + - '=' + string(TDomElement(NodeSet2.Items[i]).TextContent)); - except - continue; - end; - end; - Result := references; - except - {on E: Exception do - WriteLn('

Error fetching references from PubMed: ', - E.ClassName, #13#10, E.Message, '

');} - Result := nil; + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(PUBMED_URL + 'esearch.fcgi?db=pubmed&retmax=' + + IntToStr(limit) + '&sort=relevance&term=' + + StringReplace(searchStr, ' ', '+', [rfReplaceAll])); + + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + + { Get reference ids } + Result1 := EvaluateXPathExpression('/eSearchResult/IdList/Id', + Doc.DocumentElement); + Ids := Result1.AsNodeSet; + id := ''; + if Ids.Count > 0 then + for i := 0 to Ids.Count - 1 do + id := id + string(TDomElement(Ids.Items[i]).TextContent) + ','; + + MemStrm := TMemoryStream.Create; + XmlData := Client.Get(PUBMED_URL + 'efetch.fcgi?db=pubmed&id=' + + id + '&retmode=xml'); + if Length(XmlData) > 0 then + MemStrm.Write(XmlData[1], Length(XmlData)); + MemStrm.Position := 0; + ReadXMLFile(Doc, MemStrm); + MemStrm.Free; + + { Get list of references } + Result1 := EvaluateXPathExpression('//Article/ArticleTitle', + Doc.DocumentElement); + Result2 := EvaluateXPathExpression( + '//PubmedData/ArticleIdList/ArticleId[@IdType="doi"]', Doc.DocumentElement); + NodeSet1 := Result1.AsNodeSet; + NodeSet2 := Result2.AsNodeSet; + if NodeSet1.Count > 0 then + begin + for i := 0 to NodeSet1.Count - 1 do + try + references.Add(string(TDomElement(NodeSet1.Items[i]).TextContent) + + '=' + string(TDomElement(NodeSet2.Items[i]).TextContent)); + except + continue; + end; end; - finally - Result1.Free; - Result2.Free; - Doc.Free; + Result := references; + except + Result := nil; end; + finally + Result1.Free; + Result2.Free; + Doc.Free; + Client.Free; end; +end; begin InitSSLInterface; @@ -642,7 +648,7 @@ TPubMedSearch = class(TObject) { Search PubMed } status := ' (' + status + ')'; end; taxon_list := kingdom + '; ' + phylum + '; ' + classe + '; ' + order + '; ' + family; - WriteLn('

' + queryStr + '' + ' ' + authorship + status + '

'); + WriteLn('

' + queryStr + ' ' + authorship + ' ' + status + '

'); WriteLn('

Classification from CoL

'); if Length(scientificname) = 0 then WriteLn('No names found') @@ -661,9 +667,10 @@ TPubMedSearch = class(TObject) { Search PubMed } begin tag := tags[i]; tag := StringReplace(tag, ' ', ' ', [rfReplaceAll]); - tagHTML := tagHTML + - '' - + tag + ' ' + ''; + if Length(tag) > 0 then + tagHTML := tagHTML + + '' + + tag + ' ' + ''; end; WriteLn(tagHTML); FFSearch.Destroy; diff --git a/web/especies.py b/web/especies.py index 07ea855..789a4e7 100644 --- a/web/especies.py +++ b/web/especies.py @@ -80,6 +80,10 @@ # references # # Version 1.50 19th Jul 23 - Added italics to scientific names in taxon # # classification output # +# Version 1.60 18th Aug 23 - Added a missing space between scientific name # +# and authorship # +# Fixed a bug which sometimes caused an error # +# when listing references from PubMed # #================================================================================# import cgi @@ -346,7 +350,7 @@ def search(self, searchStr, limit=10): status = " (" + status + " of " + valid_name + "" + ") " + author else: status = " (" + status + ")" - print "

" + queryStr + "" + author + status + "

" + print "

" + queryStr + " " + author + status + "

" print "

Classification from CoL

" if len(name) == 0: print "No names found" @@ -418,8 +422,9 @@ def search(self, searchStr, limit=10): print "No articles found" else: for pub in pubs.keys(): - print "
" - print "" + pubs[pub] + "
" + if pubs[pub] != None: + print "
" + print "" + pubs[pub] + "
" else: print "e-Species" print "

Error filling out form

"