adriancole
11 years ago
5 changed files with 285 additions and 1 deletions
@ -0,0 +1,49 @@
@@ -0,0 +1,49 @@
|
||||
apply plugin: 'java'

dependencies {
    compile 'com.netflix.feign:feign-core:3.1.0'
    compile 'com.netflix.feign:feign-gson:3.1.0'
    // dagger-compiler is only needed for annotation processing; 'provided'
    // keeps it off the runtime classpath (and out of the fat jar).
    provided 'com.squareup.dagger:dagger-compiler:1.0.1'
}

// create a self-contained jar that is executable
// the output is both a 'fat' project artifact and
// a convenience file named "build/wikipedia"
task fatJar(dependsOn: classes, type: Jar) {
    classifier 'fat'

    doFirst {
        // Delay evaluation until the compile configuration is ready
        from {
            configurations.compile.collect { zipTree(it) }
        }
    }

    from (sourceSets*.output.classesDir) {
    }

    // really executable jar
    // http://skife.org/java/unix/2011/06/20/really_executable_jars.html

    manifest {
        attributes 'Main-Class': 'feign.example.wikipedia.WikipediaExample'
    }

    // for convenience, we make a file in the build dir named wikipedia with no extension
    doLast {
        // prepend a shell stub that re-execs the jar, so the file can be run
        // directly (./wikipedia) as well as via `java -jar`
        def srcFile = new File("${buildDir}/libs/${archiveName}")
        def shortcutFile = new File("${buildDir}/wikipedia")
        shortcutFile.delete()
        shortcutFile << "#!/usr/bin/env sh\n"
        shortcutFile << 'exec java -jar $0 "$@"' + "\n"
        shortcutFile << srcFile.bytes
        shortcutFile.setExecutable(true, true)
        // overwrite the plain jar artifact with the executable variant
        srcFile.delete()
        srcFile << shortcutFile.bytes
        srcFile.setExecutable(true, true)
    }
}

artifacts {
    archives fatJar
}
@ -0,0 +1,87 @@
@@ -0,0 +1,87 @@
|
||||
package feign.example.wikipedia; |
||||
|
||||
import com.google.gson.stream.JsonReader; |
||||
import feign.codec.Decoder; |
||||
|
||||
import java.io.IOException; |
||||
import java.io.Reader; |
||||
import java.lang.reflect.Type; |
||||
|
||||
abstract class ResponseDecoder<X> implements Decoder.TextStream<WikipediaExample.Response<X>> { |
||||
|
||||
/** |
||||
* name of the key inside the {@code query} dict which holds the elements desired. ex. {@code pages}. |
||||
*/ |
||||
protected abstract String query(); |
||||
|
||||
/** |
||||
* Parses the contents of a result object. |
||||
* <p/> |
||||
* <br> |
||||
* ex. If {@link #query()} is {@code pages}, then this would parse the value of each key in the dict {@code pages}. |
||||
* In the example below, this would first start at line {@code 3}. |
||||
* <p/> |
||||
* <pre> |
||||
* "pages": { |
||||
* "2576129": { |
||||
* "pageid": 2576129, |
||||
* "title": "Burchell's zebra", |
||||
* --snip-- |
||||
* </pre> |
||||
*/ |
||||
protected abstract X build(JsonReader reader) throws IOException; |
||||
|
||||
/** |
||||
* the wikipedia api doesn't use json arrays, rather a series of nested objects. |
||||
*/ |
||||
@Override |
||||
public WikipediaExample.Response<X> decode(Reader ireader, Type type) throws IOException { |
||||
WikipediaExample.Response<X> pages = new WikipediaExample.Response<X>(); |
||||
JsonReader reader = new JsonReader(ireader); |
||||
reader.beginObject(); |
||||
while (reader.hasNext()) { |
||||
String nextName = reader.nextName(); |
||||
if ("query".equals(nextName)) { |
||||
reader.beginObject(); |
||||
while (reader.hasNext()) { |
||||
if (query().equals(reader.nextName())) { |
||||
reader.beginObject(); |
||||
while (reader.hasNext()) { |
||||
// each element is in form: "id" : { object }
|
||||
// this advances the pointer to the value and skips the key
|
||||
reader.nextName(); |
||||
reader.beginObject(); |
||||
pages.add(build(reader)); |
||||
reader.endObject(); |
||||
} |
||||
reader.endObject(); |
||||
} else { |
||||
reader.skipValue(); |
||||
} |
||||
} |
||||
reader.endObject(); |
||||
} else if ("query-continue".equals(nextName)) { |
||||
reader.beginObject(); |
||||
while (reader.hasNext()) { |
||||
if ("search".equals(reader.nextName())) { |
||||
reader.beginObject(); |
||||
while (reader.hasNext()) { |
||||
if ("gsroffset".equals(reader.nextName())) { |
||||
pages.nextOffset = reader.nextLong(); |
||||
} |
||||
} |
||||
reader.endObject(); |
||||
} else { |
||||
reader.skipValue(); |
||||
} |
||||
} |
||||
reader.endObject(); |
||||
} else { |
||||
reader.skipValue(); |
||||
} |
||||
} |
||||
reader.endObject(); |
||||
reader.close(); |
||||
return pages; |
||||
} |
||||
} |
@ -0,0 +1,145 @@
@@ -0,0 +1,145 @@
|
||||
/* |
||||
* Copyright 2013 Netflix, Inc. |
||||
* |
||||
* Licensed under the Apache License, Version 2.0 (the "License"); |
||||
* you may not use this file except in compliance with the License. |
||||
* You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package feign.example.wikipedia; |
||||
|
||||
import com.google.gson.stream.JsonReader;
import dagger.Module;
import dagger.Provides;
import feign.Feign;
import feign.Logger;
import feign.RequestLine;
import feign.codec.Decoder;
import feign.gson.GsonModule;

import javax.inject.Named;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.NoSuchElementException;

import static dagger.Provides.Type.SET;
import static feign.Logger.ErrorLogger;
import static feign.Logger.Level.BASIC;
||||
|
||||
public class WikipediaExample { |
||||
|
||||
public static interface Wikipedia { |
||||
@RequestLine("GET /w/api.php?action=query&generator=search&prop=info&format=json&gsrsearch={search}") |
||||
Response<Page> search(@Named("search") String search); |
||||
|
||||
@RequestLine("GET /w/api.php?action=query&generator=search&prop=info&format=json&gsrsearch={search}&gsroffset={offset}") |
||||
Response<Page> resumeSearch(@Named("search") String search, @Named("offset") long offset); |
||||
} |
||||
|
||||
static class Page { |
||||
long id; |
||||
String title; |
||||
} |
||||
|
||||
public static class Response<X> extends ArrayList<X> { |
||||
/** |
||||
* when present, the position to resume the list. |
||||
*/ |
||||
Long nextOffset; |
||||
} |
||||
|
||||
public static void main(String... args) throws InterruptedException { |
||||
Wikipedia wikipedia = Feign.create(Wikipedia.class, "http://en.wikipedia.org", new WikipediaModule()); |
||||
|
||||
System.out.println("Let's search for PTAL!"); |
||||
Iterator<Page> pages = lazySearch(wikipedia, "PTAL"); |
||||
while (pages.hasNext()) { |
||||
System.out.println(pages.next().title); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* this will lazily continue searches, making new http calls as necessary. |
||||
* |
||||
* @param wikipedia used to search |
||||
* @param query see {@link Wikipedia#search(String)}. |
||||
*/ |
||||
static Iterator<Page> lazySearch(final Wikipedia wikipedia, final String query) { |
||||
final Response<Page> first = wikipedia.search(query); |
||||
if (first.nextOffset == null) |
||||
return first.iterator(); |
||||
return new Iterator<Page>() { |
||||
Iterator<Page> current = first.iterator(); |
||||
Long nextOffset = first.nextOffset; |
||||
|
||||
@Override |
||||
public boolean hasNext() { |
||||
while (!current.hasNext() && nextOffset != null) { |
||||
System.out.println("Wow.. even more results than " + nextOffset); |
||||
Response<Page> nextPage = wikipedia.resumeSearch(query, nextOffset); |
||||
current = nextPage.iterator(); |
||||
nextOffset = nextPage.nextOffset; |
||||
} |
||||
return current.hasNext(); |
||||
} |
||||
|
||||
@Override |
||||
public Page next() { |
||||
return current.next(); |
||||
} |
||||
|
||||
@Override |
||||
public void remove() { |
||||
throw new UnsupportedOperationException(); |
||||
} |
||||
}; |
||||
} |
||||
|
||||
@Module(overrides = true, library = true, includes = GsonModule.class) |
||||
static class WikipediaModule { |
||||
|
||||
@Provides Logger.Level loggingLevel() { |
||||
return BASIC; |
||||
} |
||||
|
||||
@Provides Logger logger() { |
||||
return new ErrorLogger(); |
||||
} |
||||
|
||||
/** |
||||
* add to the set of Decoders one that handles {@code Response<Page>}. |
||||
*/ |
||||
@Provides(type = SET) Decoder pagesDecoder() { |
||||
return new ResponseDecoder<Page>() { |
||||
|
||||
@Override |
||||
protected String query() { |
||||
return "pages"; |
||||
} |
||||
|
||||
@Override |
||||
protected Page build(JsonReader reader) throws IOException { |
||||
Page page = new Page(); |
||||
while (reader.hasNext()) { |
||||
String key = reader.nextName(); |
||||
if (key.equals("pageid")) { |
||||
page.id = reader.nextLong(); |
||||
} else if (key.equals("title")) { |
||||
page.title = reader.nextString(); |
||||
} else { |
||||
reader.skipValue(); |
||||
} |
||||
} |
||||
return page; |
||||
} |
||||
}; |
||||
} |
||||
} |
||||
} |
@ -1,2 +1,2 @@
@@ -1,2 +1,2 @@
|
||||
rootProject.name='feign'

// examples are nested under 'examples:' so they share the root build config
include 'feign-core', 'feign-gson', 'feign-jaxrs', 'feign-ribbon', 'examples:feign-example-github', 'examples:feign-example-wikipedia'
||||
|
Loading…
Reference in new issue