Running an Appium Unity App Client (OpenCV)
This is an Appium automation script for a Unity app.

Because the app is built with Unity, the screen is recognized as a single image and elements cannot be located, so OpenCV template matching is used instead.

For OpenCV, please refer to the URL below.

[http://dejavuqa.tistory.com/category/Python/OpenCV]

Two functions were written to use template matching.

One automatically applies Canny edge detection; the other takes a screenshot of the device, matches the target image against it, and returns the center coordinates of the match.

First, auto_canny(). It simplifies the image by keeping only its edges.
```python
def auto_canny(self, image, sigma=0.33):
    # compute the median of the single-channel pixel intensities
    v = np.median(image)
    lower = int(max(0, (1.0 - sigma) * v))
    upper = int(min(255, (1.0 + sigma) * v))
    edged = cv2.Canny(image, lower, upper)
    return edged  # return the edged image
```
Next is detectImage(). It takes a screenshot of the device, matches the target image against it, and returns the target's coordinates within the screenshot.
```python
def detectImage(self, templateImagePath):
    shotName = '{:%Y-%m-%d-%H-%M-%S}'.format(datetime.datetime.now())
    directory = '%s/screenshots/' % os.getcwd()
    screenshotImgPath = directory + shotName + '.png'
    self.driver.save_screenshot(screenshotImgPath)
    sleep(2)

    # source image (device screenshot)
    screenImage = cv2.imread(screenshotImgPath, 0)
    screenCann = self.auto_canny(screenImage)

    # target image (template)
    templateImage = cv2.imread(templateImagePath, 0)
    templateCann = self.auto_canny(templateImage)
    w, h = templateCann.shape[::-1]

    # find the template in the source image
    res = cv2.matchTemplate(screenCann, templateCann, cv2.TM_CCOEFF_NORMED)
    min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
    top_left = max_loc

    x = top_left[0] + int(w / 2)
    y = top_left[1] + int(h / 2)
    return x, y
```
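The template images passed to detectImage (for example `./templates/and-button-start.png` used below) are simply cropped screenshots of the button. A minimal sketch of one way to prepare them, assuming you already have a full screenshot saved; the file names and crop coordinates here are placeholders, not values from the original script:

```python
import cv2

# Crop a button region out of a previously saved screenshot and save it as a template.
# The crop box below is an assumption -- replace it with the button's actual position.
screenshot = cv2.imread('./screenshots/full-screen.png')
x, y, w, h = 600, 2300, 240, 140          # left, top, width, height (assumed values)
template = screenshot[y:y + h, x:x + w]   # numpy slicing: rows first, then columns
cv2.imwrite('./templates/and-button-start.png', template)
```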
Using the two functions above, write the test script.
```python
"""Android Unity Script"""
import unittest
import cv2
import numpy as np
from matplotlib import pyplot as plt
import os, sys
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from time import sleep
import datetime


class TableSearchTest(unittest.TestCase):

    def setUp(self):
        # Set up appium
        app = os.path.join(os.path.dirname(__file__),
                           '/Users/tongchunkim/Documents/TestAppium/appfiles/',
                           'UnityTestAndroid.apk')
        app = os.path.abspath(app)
        self.driver = webdriver.Remote(
            command_executor='http://127.0.0.1:4723/wd/hub',
            desired_capabilities={
                'app': app,
                'platformName': 'Android',
                'platformVersion': '6.0',
                'deviceName': 'V10',
                'automationName': 'Appium',
                'appPackage': 'com.kakaogames.sdk.unitysample'
            })

    def auto_canny(self, image, sigma=0.33):
        # compute the median of the single-channel pixel intensities
        v = np.median(image)
        lower = int(max(0, (1.0 - sigma) * v))
        upper = int(min(255, (1.0 + sigma) * v))
        edged = cv2.Canny(image, lower, upper)
        return edged  # return the edged image

    def detectImage(self, templateImagePath):
        shotName = '{:%Y-%m-%d-%H-%M-%S}'.format(datetime.datetime.now())
        directory = '%s/screenshots/' % os.getcwd()
        screenshotImgPath = directory + shotName + '.png'
        self.driver.save_screenshot(screenshotImgPath)
        sleep(2)

        # source image (device screenshot)
        screenImage = cv2.imread(screenshotImgPath, 0)
        screenCann = self.auto_canny(screenImage)

        # target image (template)
        templateImage = cv2.imread(templateImagePath, 0)
        templateCann = self.auto_canny(templateImage)
        w, h = templateCann.shape[::-1]

        # find the template in the source image
        res = cv2.matchTemplate(screenCann, templateCann, cv2.TM_CCOEFF_NORMED)
        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
        top_left = max_loc

        x = top_left[0] + int(w / 2)
        y = top_left[1] + int(h / 2)
        return x, y

    def test_search_field(self):
        driver = self.driver
        sleep(10)

        # find the start button on screen and tap its center
        co_x, co_y = self.detectImage('./templates/and-button-start.png')
        action = TouchAction(self.driver)
        action.tap(None, co_x, co_y, 1).perform()
        sleep(30)

    def tearDown(self):
        self.driver.quit()


if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TableSearchTest)
    unittest.TextTestRunner(verbosity=2).run(suite)
```
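One caveat: cv2.minMaxLoc always returns a best position, even when the button is not on screen at all, so without a cutoff the test can end up tapping a meaningless location. A hedged sketch of the matching step with a minimum-score check; the function name and the 0.4 threshold are assumptions and will need tuning for your templates:

```python
import cv2


def detect_image_strict(screenCann, templateCann, threshold=0.4):
    """Same matching step as detectImage, but fail instead of using a weak match."""
    w, h = templateCann.shape[::-1]
    res = cv2.matchTemplate(screenCann, templateCann, cv2.TM_CCOEFF_NORMED)
    min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)

    # If the best correlation score is too low, treat it as "not found"
    # instead of returning a random location. The 0.4 cutoff is an assumption.
    if max_val < threshold:
        raise AssertionError('template not found on screen (score=%.2f)' % max_val)

    top_left = max_loc
    return top_left[0] + int(w / 2), top_left[1] + int(h / 2)
```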
The iOS script works the same way.
```python
"""iOS Unity Script"""
import unittest
import cv2
import numpy as np
from matplotlib import pyplot as plt
import os, sys
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from time import sleep
import datetime


class TableSearchTest(unittest.TestCase):

    def setUp(self):
        # Set up appium
        app = os.path.join(os.path.dirname(__file__),
                           '/Users/tongchunkim/Documents/TestAppium/appfiles/',
                           'com.kakaogames.sdk.unitysample.ipa')
        app = os.path.abspath(app)
        self.driver = webdriver.Remote(
            command_executor='http://127.0.0.1:4723/wd/hub',
            desired_capabilities={
                'platformName': 'ios',
                'platformVersion': '11.0',
                'deviceName': 'iPhone 6',
                'automationName': 'XCUITest',
                'newCommandTimeout': 7200,
                'bundleId': 'com.kakaogames.sdk.unitysample',
                'udid': '73439839ee3db7b59fcdd8bc3aa8cc4862006b7b',
                'xcodeOrgId': 'WVC7779982',
                'xcodeSigningId': 'iPhone Developer'
            })

    def auto_canny(self, image, sigma=0.33):
        # compute the median of the single-channel pixel intensities
        v = np.median(image)
        lower = int(max(0, (1.0 - sigma) * v))
        upper = int(min(255, (1.0 + sigma) * v))
        edged = cv2.Canny(image, lower, upper)
        return edged  # return the edged image

    def detectImage(self, templateImagePath):
        shotName = '{:%Y-%m-%d-%H-%M-%S}'.format(datetime.datetime.now())
        directory = '%s/screenshots/' % os.getcwd()
        screenshotImgPath = directory + shotName + '.png'
        self.driver.save_screenshot(screenshotImgPath)
        sleep(2)

        # source image (device screenshot)
        screenImage = cv2.imread(screenshotImgPath, 0)
        screenCann = self.auto_canny(screenImage)

        # target image (template)
        templateImage = cv2.imread(templateImagePath, 0)
        templateCann = self.auto_canny(templateImage)
        w, h = templateCann.shape[::-1]

        # find the template in the source image
        res = cv2.matchTemplate(screenCann, templateCann, cv2.TM_CCOEFF_NORMED)
        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
        top_left = max_loc

        x = top_left[0] + int(w / 2)
        y = top_left[1] + int(h / 2)
        return x, y

    def test_search_field(self):
        driver = self.driver
        sleep(10)

        # find the start button on screen and tap its center
        co_x, co_y = self.detectImage('./templates/ios-button-start.png')
        action = TouchAction(self.driver)
        action.tap(None, co_x, co_y, 1).perform()
        sleep(30)

    def tearDown(self):
        self.driver.quit()


if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TableSearchTest)
    unittest.TextTestRunner(verbosity=2).run(suite)
```
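One thing to watch on iOS: save_screenshot captures the screen in device pixels, while TouchAction.tap takes point coordinates, so the matched position may need to be divided by the device scale factor before tapping. A hedged sketch of the tap step with that adjustment; the scale factor 2 matches an iPhone 6 and is otherwise an assumption:

```python
# iOS screenshots are saved in pixels, but tap coordinates are in points, so divide
# the matched coordinates by the device scale factor (2 on iPhone 6; adjust per device).
scale = 2
action = TouchAction(self.driver)
action.tap(None, int(co_x / scale), int(co_y / scale), 1).perform()
```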