RemoteWebDriver cannot be cast to SikuliWebDriver - java

I was trying to run Sikuli WebDriver-based tests on the Sauce OnDemand infrastructure, but I have a problem with RemoteWebDriver.
I have this BaseSikuliWebDriverTest class:
package com.pitito.sikuli.base;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import org.openqa.selenium.Platform;
import org.openqa.selenium.remote.Augmenter;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.testng.ITestResult;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import com.pitito.core.basetests.BaseLoggingTest;
import com.pitito.selenium.webdriver.RemoteWebDriverSession;
import com.pitito.selenium.webdriver.WebDriverScreenshooter;
import com.pitito.sikuli.webdriver.SikuliFirefoxDriver;
/**
 * Base class for all Sikuli WebDriver tests.
 *
 * @author guillem.hernandez
 */
public abstract class BaseSikuliWebDriverTest extends BaseLoggingTest {
Map<String, Object> sauceJob = new HashMap<String, Object>();
private static SikuliFirefoxDriver sikuliDriver;
protected SikuliFirefoxDriver driver() {
return getDriver();
}
public static SikuliFirefoxDriver getDriver() {
return sikuliDriver;
}
public static void setDriver(SikuliFirefoxDriver driver) {
BaseSikuliWebDriverTest.sikuliDriver = driver;
}
@Override
@BeforeMethod(alwaysRun = true)
protected void setup(Method method, Object[] testArguments) {
super.setup(method, testArguments);
String sessionId = method.getName() + "_" + testArguments.hashCode();
DesiredCapabilities caps = DesiredCapabilities.firefox();
caps.setCapability("id", sessionId);
caps.setCapability("name", sessionId);
caps.setCapability(CapabilityType.BROWSER_NAME, "firefox");
caps.setCapability("platform", Platform.XP);
caps.setCapability("version", "21");
try {
sikuliDriver = (SikuliFirefoxDriver) new Augmenter().augment(new RemoteWebDriver(new URL("http://"
+ RemoteWebDriverSession.USER + ":" + RemoteWebDriverSession.APIKEY
+ "@ondemand.saucelabs.com:80/wd/hub"), caps));
} catch (MalformedURLException e) {
e.printStackTrace();
}
setDriver(sikuliDriver);
}
@Override
@AfterMethod(alwaysRun = true)
protected void teardown(ITestResult tr, Method method) {
if ((logger() != null) && (tr.getStatus() == ITestResult.FAILURE)) {
logUnexpectedException(tr.getThrowable());
}
super.teardown(tr, method);
sikuliDriver.quit();
}
@Override
protected void logScreenshot(String screenshotName) {
logResource(new WebDriverScreenshooter(driver(), screenshotName).getScreenshot());
}
}
The test I implemented is the Sikuli WebDriver example and the code is as follows:
package com.pitito.sikuli.tests;
import java.net.MalformedURLException;
import java.net.URL;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebElement;
import org.testng.annotations.Test;
import com.pitito.sikuli.base.BaseSikuliWebDriverTest;
import com.pitito.sikuli.webdriver.ImageElement;
/**
 * Sikuli Firefox WebDriver Automated Test Example.
 *
 * @author guillem.hernandez
 */
public class SikuliGoogleCodeTest extends BaseSikuliWebDriverTest {
@Test(groups = { "ES" }, description = "Use Sikuli to search on Google Maps")
public void testSikuliWebDriverPassingExample_ES() {
verifySikuliWebDriverPassingTest();
}
private void verifySikuliWebDriverPassingTest() {
// visit Google Map
driver().get("https://maps.google.com/");
// enter "Denver, CO" as search terms
WebElement input = driver().findElement(By.id("gbqfq"));
input.sendKeys("Denver, CO");
input.sendKeys(Keys.ENTER);
ImageElement image;
// find and click on the image of the lakewood area
try {
image = driver().findImageElement(new URL("https://dl.dropbox.com/u/5104407/lakewood.png"));
image.doubleClick();
// find and click on the image of the kendrick lake area
image =
driver().findImageElement(new URL("https://dl.dropbox.com/u/5104407/kendrick_lake.png"));
image.doubleClick();
// find and click the Satellite icon to switch to the satellite view
image = driver().findImageElement(new URL("https://dl.dropbox.com/u/5104407/satellite.png"));
image.click();
// find and click the plus button to zoom in
image = driver().findImageElement(new URL("https://dl.dropbox.com/u/5104407/plus.png"));
image.click();
// find and click the link button
WebElement linkButton = driver().findElement(By.id("link"));
linkButton.click();
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
}
When I try to run the test, the error I get is this one:
[Invoker 18958118] Invoking @BeforeMethod BaseSikuliWebDriverTest.setup(java.lang.reflect.Method, [Ljava.lang.Object;)[pri:0, instance:com.pitito.sikuli.tests.SikuliGoogleCodeTest@137008a]
Failed to invoke configuration method com.pitito.sikuli.base.BaseSikuliWebDriverTest.setup:org.openqa.selenium.remote.RemoteWebDriver$$EnhancerByCGLIB$$52a1cf6f cannot be cast to com.pitito.sikuli.webdriver.SikuliFirefoxDriver
The problem resides here:
sikuliDriver = (SikuliFirefoxDriver) new Augmenter().augment(new RemoteWebDriver(new URL("http://"
+ RemoteWebDriverSession.USER + ":" + RemoteWebDriverSession.APIKEY
+ "@ondemand.saucelabs.com:80/wd/hub"), caps));
How can I use SikuliFirefoxDriver remotely? How can I cast a RemoteWebDriver to SikuliFirefoxDriver? Is that even possible?

As far as I know, the Selenium Grid server does not have the capability of passing Sikuli commands (and binary screenshots for comparison purposes) through its JSON API. Not even Sauce Labs has this capability; hopefully it's on the radar to be implemented someday. Someone asked this question on the Sauce Labs forum as well, and I gave them this same answer.
I know there is an in-progress project called Marionette that is supposed to be able to automate browser/Firefox menus and native dialogs.

I implemented a remote-driver version of the Sikuli driver; you can use that for this.
Please feel free to fork it:
https://github.com/AJ-72/SikuliRemoteWebdriver

My guess is that SikuliFirefoxDriver cannot be augmented because it was not started as a RemoteWebDriver. Try invoking it as a RemoteWebDriver with Sikuli in the desired capabilities.
Please post here if it worked (I could not find proof that it is possible, but it is still worth a shot).
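For what it's worth, here is a minimal, hedged sketch of why the cast blows up (USER/APIKEY are placeholders for real Sauce Labs credentials): the object Augmenter returns is still a RemoteWebDriver proxy, so it can be cast to interfaces it implements, such as TakesScreenshot, but never to an unrelated concrete class such as SikuliFirefoxDriver.
import java.net.URL;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.Augmenter;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
public class AugmenterCastSketch {
    public static void main(String[] args) throws Exception {
        // USER and APIKEY are placeholders for real Sauce Labs credentials.
        RemoteWebDriver remote = new RemoteWebDriver(
                new URL("http://USER:APIKEY@ondemand.saucelabs.com:80/wd/hub"),
                DesiredCapabilities.firefox());
        WebDriver augmented = new Augmenter().augment(remote);
        // Works: the augmented proxy implements the TakesScreenshot interface.
        byte[] png = ((TakesScreenshot) augmented).getScreenshotAs(OutputType.BYTES);
        System.out.println("screenshot bytes: " + png.length);
        // Fails: the proxy is still a RemoteWebDriver, not a SikuliFirefoxDriver;
        // this is exactly the ClassCastException from the question.
        // SikuliFirefoxDriver sikuli = (SikuliFirefoxDriver) augmented;
        augmented.quit();
    }
}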

Related

Selenium + JUnit, Assertion error of proper page loaded

I've written code using Selenium WebDriver and JUnit that should check whether the proper page is loaded and whether the table on this page is not null.
Besides the assertion, to make sure the proper page is loaded (checking whether the URL contains a specified text), I've put an "if-else" statement in the "waitUntilPageLoaded" method.
import junit.framework.TestCase;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.junit.*;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
public class GuruSearch
{
@Test
static void checkDemoLogin()
{
System.setProperty("webdriver.gecko.driver", "C:\\Users\\pansa\\Documents\\Webdrivers\\geckodriver.exe");
FirefoxDriver driver = new FirefoxDriver();
driver.get("https://demo.guru99.com/");
TestCase.assertFalse(driver.getPageSource().contains("404"));
driver.manage().window().maximize();
//odczekaj(5000);
WebElement browser = driver.findElement(By.name("emailid"));
browser.sendKeys("Test@test.com");
browser.submit();
waitUntilPageLoaded(driver,"access.php", 10);
TestCase.assertTrue(driver.getPageSource().contains("Access details to demo site"));
WebElement table = driver.findElement(By.tagName("tbody"));
TestCase.assertNotNull(table);
driver.close();
}
static private void odczekaj(int czas)
{
try {
Thread.sleep(czas);
} catch (InterruptedException e) {
e.printStackTrace();
System.out.println("Przerwanie");
}
}
private static void waitUntilPageLoaded(FirefoxDriver d, String zawiera, int timeout)
{
WebDriverWait wait = new WebDriverWait(d, timeout);
wait.until(ExpectedConditions.urlContains(zawiera));
if (d.getCurrentUrl().contains(zawiera))
{
System.out.println("OK");
}
else
{
System.out.println("NOK");
}
}
}
The "if-else" statement in the "waitUntilPageLoaded" method prints "OK", but TestCase.assertTrue(driver.getPageSource().contains("Access details to demo site"))
throws an AssertionError, although the text appears on the page.
Why is an AssertionError thrown?
As you are using the JUnit annotations, the annotated method shouldn't be static. I have modified your code a bit; try the below:
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import junit.framework.TestCase;
public class GuruSearch
{
@Test
public void checkDemoLogin()
{
System.setProperty("webdriver.chrome.driver", "C:\\NotBackedUp\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
driver.get("https://demo.guru99.com/");
TestCase.assertFalse(driver.getPageSource().contains("404"));
driver.manage().window().maximize();
//odczekaj(5000);
WebElement browser = driver.findElement(By.name("emailid"));
browser.sendKeys("Test@test.com");
browser.submit();
waitUntilPageLoaded(driver,"access.php", 30);
TestCase.assertTrue(driver.getPageSource().contains("Access details to demo site"));
WebElement table = driver.findElement(By.tagName("tbody"));
TestCase.assertNotNull(table);
driver.close();
}
static private void odczekaj(int czas)
{
try {
Thread.sleep(czas);
} catch (InterruptedException e) {
e.printStackTrace();
System.out.println("Przerwanie");
}
}
private static void waitUntilPageLoaded(WebDriver d, String zawiera, int timeout)
{
WebDriverWait wait = new WebDriverWait(d, timeout);
wait.until(ExpectedConditions.urlContains(zawiera));
if (d.getCurrentUrl().contains(zawiera))
{
System.out.println("OK");
}
else
{
System.out.println("NOK");
}
}
}
The above code prints 'OK' and the assert condition also passes. Note that String.contains is case-sensitive, so the expected text has to match the page's casing exactly.
I don't have Firefox on my system, so I checked with Chrome; change the browser back and try. I hope it helps.
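If the assertion is still flaky after that change, a hedged alternative (this is an assumption about the root cause, not a verified fix) is to wait for the confirmation text itself before asserting, so the check can never run against a half-rendered page:
// Sketch: wait for the confirmation text before asserting on the page source.
WebDriverWait wait = new WebDriverWait(driver, 30);
wait.until(ExpectedConditions.textToBePresentInElementLocated(
        By.tagName("body"), "Access details to demo site"));
TestCase.assertTrue(driver.getPageSource().contains("Access details to demo site"));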

Selenium Java not finding elements and/or can't click

I hope you'll find a solution for my problem.
The page I am trying to test (I will have ARender deployed in my company and I need to test it): http://arender.fr/ARender/
The XPath I use to click on the next page (given by Firebug; I tried others, but they behave the same way):
//*[@id='id_#_0.7290579307692522']/tbody/tr/td[2]/div/img
I have tried many things and I really cannot find the solution; I have already tried to make a JavascriptExecutor click it.
Java code :
package firstPackage;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.NoSuchElementException;
import java.util.concurrent.TimeUnit;
import firstPackage.Props;
import firstPackage.methods;
import firstPackage.PropTech;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.junit.*;
import org.openqa.selenium.WebDriver;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import org.openqa.selenium.*;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.ie.InternetExplorerDriverLogLevel;
import org.openqa.selenium.ie.InternetExplorerDriverService;
import org.openqa.selenium.support.ui.Select;
public class TestSelenium {
private WebDriver driver;
protected static InternetExplorerDriverService service;
private StringBuffer verificationErrors = new StringBuffer();
@Before
public void setUp()
{
System.out.println("*******************");
System.out.println("launching IE browser");
System.setProperty("webdriver.ie.driver", PropTech.driverPath+"IEDriverServer.exe");
driver = new InternetExplorerDriver();
driver.manage().window().maximize();
}
@Test
public void test() throws Exception
{
driver.navigate().to("http://arender.fr" + "/ARender/");
Thread.sleep(15000);
driver.findElement(By.xpath("//div[@title='Next page']/img")).click();
}
@After
public void tearDown()
{
if(driver!=null)
{
System.out.println("Closing IE browser");
driver.quit();
// Kill the processes
try {
Runtime.getRuntime().exec("taskkill /F /IM IEDriverServer.exe");
Runtime.getRuntime().exec("taskkill /F /IM iexplore.exe");
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
It might be dynamically loaded on the page; try to use WebDriverWait. Also please pay attention to the XPath, I've changed it a little. Checked execution in IE:
WebDriverWait wait = new WebDriverWait(driver, 30);
driver.get("http://arender.fr/ARender/");
// Click the element after it becomes clickable
wait.until(ExpectedConditions.elementToBeClickable(By.xpath("//td[div[@title='Next page']]"))).click();
The following XPath should work:
//div[@title='Next page']/img
It seems the ID which you used in your XPath is dynamically generated (@id='id_#_0.7290579307692522'). Can you confirm it remains the same even after refreshing the page?
If not, try to build the XPath from some other attributes or use an XPath wildcard search.
Thanks
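A minimal sketch of that wildcard idea, assuming the div's title attribute is the stable part of the markup (the locator is taken from the other answers in this thread, not verified against the live page):
// Sketch: skip the generated id and key off attributes that do not change between loads.
WebDriverWait wait = new WebDriverWait(driver, 30);
WebElement next = wait.until(ExpectedConditions.elementToBeClickable(
        By.xpath("//*[starts-with(@id,'id_')]//div[@title='Next page']/img")));
next.click();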

Why does this ID-selection not work in Jsoup?

I am trying to get data from a webpage (http://steamcommunity.com/id/Winning117/games/?tab=all) using a specific tag but I keep getting null. My desired result is to get the "hours played" for a specific game - Cluckles' Adventure in this case.
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
public class TestScrape {
public static void main(String[] args) throws Exception {
String url = "http://steamcommunity.com/id/Winning117/games/?tab=all";
Document document = Jsoup.connect(url).get();
Element playTime = document.select("div#game_605250").first();
System.out.println(playTime);
}
}
Edit: How can I tell if a webpage is using JavaScript and is therefore unable to be parsed by Jsoup?
To execute JavaScript in Java code there is Selenium:
Selenium-WebDriver makes direct calls to the browser using each
browser's native support for automation.
To include it with Maven, use this dependency:
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-server</artifactId>
<version>3.4.0</version>
</dependency>
Next is the code of a simple JUnit test that creates an instance of WebDriver, goes to the given URL, and executes a simple script to get rgGames.
You have to download the chromedriver executable from https://sites.google.com/a/chromium.org/chromedriver/downloads.
package SeleniumProject.selenium;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriverService;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.WebDriverWait;
import junit.framework.TestCase;
@RunWith(JUnit4.class)
public class ChromeTest extends TestCase {
private static ChromeDriverService service;
private WebDriver driver;
@BeforeClass
public static void createAndStartService() {
service = new ChromeDriverService.Builder()
.usingDriverExecutable(new File("D:\\Downloads\\chromedriver_win32\\chromedriver.exe"))
.withVerbose(false).usingAnyFreePort().build();
try {
service.start();
} catch (IOException e) {
System.out.println("service didn't start");
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@AfterClass
public static void createAndStopService() {
service.stop();
}
@Before
public void createDriver() {
ChromeOptions chromeOptions = new ChromeOptions();
DesiredCapabilities capabilities = DesiredCapabilities.chrome();
capabilities.setCapability(ChromeOptions.CAPABILITY, chromeOptions);
driver = new RemoteWebDriver(service.getUrl(), capabilities);
}
@After
public void quitDriver() {
driver.quit();
}
@Test
public void testJS() {
JavascriptExecutor js = (JavascriptExecutor) driver;
// Load a new web page in the current browser window.
driver.get("http://steamcommunity.com/id/Winning117/games/?tab=all");
// Executes JavaScript in the context of the currently selected frame or
// window.
ArrayList<Map> list = (ArrayList<Map>) js.executeScript("return rgGames;");
// Map represent properties for one game
for (Map map : list) {
for (Object key : map.keySet()) {
// take each key to find the key "name" and compare its value to
// Cluckles' Adventure
if (key instanceof String && key.equals("name") && map.get(key).equals("Cluckles' Adventure")) {
// print all properties for game Cluckles' Adventure
map.forEach((key1, value) -> {
System.out.println(key1 + " : " + value);
});
}
}
}
}
}
As you can see, Selenium loads the page at
driver.get("http://steamcommunity.com/id/Winning117/games/?tab=all");
and to get the data for all games owned by Winning117 it reads the rgGames variable:
ArrayList<Map> list = (ArrayList<Map>) js.executeScript("return rgGames;");
The page you want to scrape is rendered by JS, so there is no #game_605250 element for Jsoup to get. All the data is written into the page by JavaScript.
But when I print the document to a file, I see some data like this:
<script language="javascript">
var rgGames = [{"appid":224260,"name":"No More Room in Hell","logo":"http:\/\/cdn.steamstatic.com.8686c.com\/steamcommunity\/public\/images\/apps\/224260\/670e9aba35dc53a6eb2bc686d302d357a4939489.jpg","friendlyURL":224260,"availStatLinks":{"achievements":true,"global_achievements":true,"stats":false,"leaderboards":false,"global_leaderboards":false},"hours_forever":"515","last_played":1492042097},{"appid":241540,"name":"State of Decay","logo":"http:\/\/....
Then you can extract 'rgGames' with some string tools and parse it as a JSON object.
It's not a clever method, but it worked.
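A hedged sketch of that string-extraction idea, assuming the rgGames assignment still appears in an inline script; it uses org.json for parsing (any JSON library would do), and the regex assumes the array contains no nested arrays:
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.json.JSONArray;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
public class RgGamesScrapeSketch {
    public static void main(String[] args) throws Exception {
        String url = "http://steamcommunity.com/id/Winning117/games/?tab=all";
        Document document = Jsoup.connect(url).get();
        // Pull the inline "var rgGames = [...];" assignment out of the raw HTML.
        Matcher m = Pattern.compile("var rgGames = (\\[.*?\\]);", Pattern.DOTALL)
                .matcher(document.html());
        if (m.find()) {
            JSONArray games = new JSONArray(m.group(1));
            for (int i = 0; i < games.length(); i++) {
                JSONObject game = games.getJSONObject(i);
                if ("Cluckles' Adventure".equals(game.optString("name"))) {
                    // hours_forever is the playtime field shown in the dump above.
                    System.out.println(game.optString("hours_forever") + " hours");
                }
            }
        }
    }
}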
try this :
public class TestScrape {
public static void main(String[] args) throws Exception {
String url = "http://steamcommunity.com/id/Winning117/games/?tab=all";
Document document = Jsoup.connect(url).get();
Element playTime = document.select("div#game_605250").first();
Elements val = playTime.select(".hours_played");
System.out.println(val.text());
}
}

Where to find chromedriver.log in Selenium using Java. Where can I see the log file of chromedriver?

I want to extract Chrome logs in a Java Selenium WebDriver project. I am using the following to launch chromedriver:
dc = DesiredCapabilities.chrome();
System.setProperty("webdriver.chrome.logFile", "//src//resource//log.txt");
Another similar question has the following solution:
import java.util.Date;
import java.util.logging.Level;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.logging.LogEntries;
import org.openqa.selenium.logging.LogEntry;
import org.openqa.selenium.logging.LogType;
import org.openqa.selenium.logging.LoggingPreferences;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class ChromeConsoleLogging {
private WebDriver driver;
@BeforeMethod
public void setUp() {
System.setProperty("webdriver.chrome.driver", "c:\\path\\to\\chromedriver.exe");
DesiredCapabilities caps = DesiredCapabilities.chrome();
LoggingPreferences logPrefs = new LoggingPreferences();
logPrefs.enable(LogType.BROWSER, Level.ALL);
caps.setCapability(CapabilityType.LOGGING_PREFS, logPrefs);
driver = new ChromeDriver(caps);
}
@AfterMethod
public void tearDown() {
driver.quit();
}
public void analyzeLog() {
LogEntries logEntries = driver.manage().logs().get(LogType.BROWSER);
for (LogEntry entry : logEntries) {
System.out.println(new Date(entry.getTimestamp()) + " " + entry.getLevel() + " " + entry.getMessage());
//do something useful with the data
}
}
@Test
public void testMethod() {
driver.get("http://mypage.com");
//do something on page
analyzeLog();
}
}
That solution is not useful for me, as I want to save the browser log for every session in a file named after the session ID itself.
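For what it's worth, a hedged sketch of one way to do that, building on the logging-preferences code above: dump the browser log into a file named after the RemoteWebDriver session ID (the output directory is a placeholder). Call it from the @AfterMethod before driver.quit(), while the session is still alive; ChromeDriver extends RemoteWebDriver, so the cast is safe.
import java.io.File;
import java.io.PrintWriter;
import java.util.Date;
import org.openqa.selenium.logging.LogEntries;
import org.openqa.selenium.logging.LogEntry;
import org.openqa.selenium.logging.LogType;
import org.openqa.selenium.remote.RemoteWebDriver;
public class SessionLogDump {
    // Writes the accumulated browser log of the given driver to <dir>/<sessionId>.log.
    public static void dumpBrowserLog(RemoteWebDriver driver, File dir) throws Exception {
        String sessionId = driver.getSessionId().toString();
        LogEntries entries = driver.manage().logs().get(LogType.BROWSER);
        try (PrintWriter out = new PrintWriter(new File(dir, sessionId + ".log"))) {
            for (LogEntry entry : entries) {
                out.println(new Date(entry.getTimestamp()) + " " + entry.getLevel() + " " + entry.getMessage());
            }
        }
    }
}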

How to optimize the scroll-down code in java using selenium

I am working on a Maven project in Java.
I have to fetch a URL, scroll the page down, and get the links to all the other items on that page.
So far I get the page dynamically using Selenium, scroll it down, and fetch the links, but it takes too much time. Please help me optimize it.
Example: I am working on a page whose link is here.
My question:
Scrolling a web page using Selenium is very slow. How can I optimize this? (Suggest any other method
to do the same, or help me optimize this one.)
Thanks in advance. Looking forward to your kind response.
Code to dynamically get and scroll the page:
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import com.google.common.collect.*;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import org.apache.commons.io.FileUtils;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
/**
 *
 * @author jhamb
 */
public class Scroll_down {
private static FirefoxProfile createFirefoxProfile() {
File profileDir = new File("/tmp/firefox-profile-dir");
if (profileDir.exists()) {
return new FirefoxProfile(profileDir);
}
FirefoxProfile firefoxProfile = new FirefoxProfile();
File dir = firefoxProfile.layoutOnDisk();
try {
profileDir.mkdirs();
FileUtils.copyDirectory(dir, profileDir);
} catch (IOException e) {
e.printStackTrace();
}
return firefoxProfile;
}
public static void main(String[] args) throws InterruptedException{
String url1 = "http://www.jabong.com/men/shoes/men-sports-shoes/?source=home-leftnav";
System.out.println("Fetching " + url1 + "...");
WebDriver driver = new FirefoxDriver(createFirefoxProfile());
driver.get(url1);
JavascriptExecutor jse = (JavascriptExecutor)driver;
jse.executeScript("window.scrollBy(0,250)", "");
for (int second = 0;; second++) {
if (second >= 60) {
break;
}
jse.executeScript("window.scrollBy(0,200)", "");
Thread.sleep(1000);
}
String hml = driver.getPageSource();
driver.close();
Document document = Jsoup.parse(hml);
Elements links = document.select("div");
for (Element link : links) {
System.out.println(link.attr("data-url"));
}
}
}
Well, Selenium scrolling is based on JavaScript. I don't know your goal with Selenium, though; you have no assertion to compare anything in your code?
If you are sure your data is fetched quickly, don't use any sleep method.
Sleep methods make Selenium slower, but yes, they do wait until the element is properly loaded.
It's up to you what to test, though.
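A hedged sketch of one common optimization, assuming the page simply keeps appending items as you scroll: replace the fixed 60-iteration loop in main with a loop that scrolls to the bottom and stops as soon as document.body.scrollHeight stops growing (main already declares throws InterruptedException, so Thread.sleep needs no extra handling):
// Sketch: stop scrolling as soon as the page height stops changing.
JavascriptExecutor jse = (JavascriptExecutor) driver;
long lastHeight = (Long) jse.executeScript("return document.body.scrollHeight;");
while (true) {
    jse.executeScript("window.scrollTo(0, document.body.scrollHeight);");
    Thread.sleep(500); // short pause so lazily loaded items can render
    long newHeight = (Long) jse.executeScript("return document.body.scrollHeight;");
    if (newHeight == lastHeight) {
        break; // nothing new was loaded, so we are at the real bottom
    }
    lastHeight = newHeight;
}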
How about page down?
ele.sendKeys(Keys.PAGE_DOWN); //WebElement ele = <Any existing element>
Repeat this till you find that particular item.
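A hedged sketch of that repeat-until-found idea; the locator below is only a placeholder for whatever identifies the item you are scrolling towards:
// Sketch: press Page Down on <body> until the target element shows up (or we give up).
WebElement body = driver.findElement(By.tagName("body"));
By target = By.cssSelector("div[data-url]"); // placeholder locator, adjust to your item
for (int i = 0; i < 50 && driver.findElements(target).isEmpty(); i++) {
    body.sendKeys(Keys.PAGE_DOWN);
    Thread.sleep(300); // brief pause so lazily loaded content can appear
}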
