vg_lite_path.c 214 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
752785279528052815282528352845285528652875288528952905291529252935294529552965297529852995300530153025303530453055306530753085309531053115312531353145315531653175318531953205321532253235324532553265327532853295330533153325333533453355336533753385339534053415342534353445345534653475348534953505351535253535354535553565357535853595360536153625363536453655366536753685369537053715372537353745375537653775378537953805381538253835384538553865387538853895390539153925393539453955396539753985399540054015402540354045405540654075408540954105411541254135414541554165417541854195420542154225423542454255426542754285429543054315432543354345435543654375438543954405441
  1. /****************************************************************************
  2. *
  3. * Copyright 2012 - 2023 Vivante Corporation, Santa Clara, California.
  4. * All Rights Reserved.
  5. *
  6. * Permission is hereby granted, free of charge, to any person obtaining
  7. * a copy of this software and associated documentation files (the
  8. * 'Software'), to deal in the Software without restriction, including
  9. * without limitation the rights to use, copy, modify, merge, publish,
  10. * distribute, sub license, and/or sell copies of the Software, and to
  11. * permit persons to whom the Software is furnished to do so, subject
  12. * to the following conditions:
  13. *
  14. * The above copyright notice and this permission notice (including the
  15. * next paragraph) shall be included in all copies or substantial
  16. * portions of the Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
  19. * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  20. * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  21. * IN NO EVENT SHALL VIVANTE AND/OR ITS SUPPLIERS BE LIABLE FOR ANY
  22. * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  23. * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  24. * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  25. *
  26. *****************************************************************************/
  27. #include "vg_lite_context.h"
  28. /* Path data operations. */
  29. #define CDALIGN(value, by) (((value) + (by) - 1) & ~((by) - 1))
  30. #define CDMIN(x, y) ((x) > (y) ? (y) : (x))
  31. #define CDMAX(x, y) ((x) > (y) ? (x) : (y))
  32. extern uint32_t transform(vg_lite_point_t* result, vg_lite_float_t x, vg_lite_float_t y, vg_lite_matrix_t* matrix);
  33. extern uint32_t convert_blend(vg_lite_blend_t blend);
  34. extern uint32_t inverse(vg_lite_matrix_t* result, vg_lite_matrix_t* matrix);
  35. extern uint32_t convert_yuv2rgb(vg_lite_yuv2rgb_t yuv);
  36. extern uint32_t convert_uv_swizzle(vg_lite_swizzle_t swizzle);
  37. extern uint32_t convert_source_format(vg_lite_buffer_format_t format);
  38. extern vg_lite_error_t check_compress(vg_lite_buffer_format_t format, vg_lite_compress_mode_t compress_mode, vg_lite_buffer_layout_t tiled, uint32_t width, uint32_t height);
  39. extern void get_format_bytes(vg_lite_buffer_format_t format, uint32_t* mul, uint32_t* div, uint32_t* bytes_align);
  40. extern vg_lite_error_t srcbuf_align_check(vg_lite_buffer_t* source);
  41. extern vg_lite_matrix_t identity_mtx;
  42. /* Convert VGLite data format to HW value. */
  43. static uint32_t convert_path_format(vg_lite_format_t format)
  44. {
  45. switch (format) {
  46. case VG_LITE_S8:
  47. return 0;
  48. case VG_LITE_S16:
  49. return 0x100000;
  50. case VG_LITE_S32:
  51. return 0x200000;
  52. case VG_LITE_FP32:
  53. return 0x300000;
  54. default:
  55. return 0;
  56. }
  57. }
  58. /* Convert VGLite quality enums to HW values. */
  59. static uint32_t convert_path_quality(vg_lite_quality_t quality)
  60. {
  61. switch (quality) {
  62. case VG_LITE_HIGH:
  63. return 0x3;
  64. case VG_LITE_UPPER:
  65. return 0x2;
  66. case VG_LITE_MEDIUM:
  67. return 0x1;
  68. default:
  69. return 0x0;
  70. }
  71. }
  72. static int32_t get_data_count(uint8_t cmd)
  73. {
  74. static int32_t count[] = {
  75. 0,
  76. 0,
  77. 2,
  78. 2,
  79. 2,
  80. 2,
  81. 4,
  82. 4,
  83. 6,
  84. 6,
  85. 0,
  86. 1,
  87. 1,
  88. 1,
  89. 1,
  90. 2,
  91. 2,
  92. 4,
  93. 4,
  94. 5,
  95. 5,
  96. 5,
  97. 5,
  98. 5,
  99. 5,
  100. 5,
  101. 5
  102. };
  103. if (cmd > VLC_OP_LCWARC_REL) {
  104. return -1;
  105. }
  106. else {
  107. return count[cmd];
  108. }
  109. }
  110. static void compute_pathbounds(float* xmin, float* ymin, float* xmax, float* ymax, float x, float y)
  111. {
  112. if (xmin != NULL)
  113. {
  114. *xmin = *xmin < x ? *xmin : x;
  115. }
  116. if (xmax != NULL)
  117. {
  118. *xmax = *xmax > x ? *xmax : x;
  119. }
  120. if (ymin != NULL)
  121. {
  122. *ymin = *ymin < y ? *ymin : y;
  123. }
  124. if (ymax != NULL)
  125. {
  126. *ymax = *ymax > y ? *ymax : y;
  127. }
  128. }
  129. int32_t get_data_size(vg_lite_format_t format)
  130. {
  131. int32_t data_size = 0;
  132. switch (format) {
  133. case VG_LITE_S8:
  134. data_size = sizeof(int8_t);
  135. break;
  136. case VG_LITE_S16:
  137. data_size = sizeof(int16_t);
  138. break;
  139. case VG_LITE_S32:
  140. data_size = sizeof(int32_t);
  141. break;
  142. default:
  143. data_size = sizeof(vg_lite_float_t);
  144. break;
  145. }
  146. return data_size;
  147. }
  148. vg_lite_error_t vg_lite_init_path(vg_lite_path_t* path,
  149. vg_lite_format_t data_format,
  150. vg_lite_quality_t quality,
  151. vg_lite_uint32_t path_length,
  152. vg_lite_pointer path_data,
  153. vg_lite_float_t min_x, vg_lite_float_t min_y,
  154. vg_lite_float_t max_x, vg_lite_float_t max_y)
  155. {
  156. int32_t data_size, num = 0;
  157. if (path == NULL)
  158. return VG_LITE_INVALID_ARGUMENT;
  159. memset(path, 0, sizeof(*path));
  160. path->format = data_format;
  161. path->quality = quality;
  162. path->bounding_box[0] = min_x;
  163. path->bounding_box[1] = min_y;
  164. path->bounding_box[2] = max_x;
  165. path->bounding_box[3] = max_y;
  166. /* Path data cannot end with a CLOSE op. Replace CLOSE with END for path_data */
  167. data_size = get_data_size(data_format);
  168. num = path_length / data_size;
  169. switch (data_format)
  170. {
  171. case VG_LITE_S8:
  172. if (path_data && (*((char*)path_data + num - 1) == VLC_OP_CLOSE))
  173. {
  174. *(char*)((int*)path_data + num - 1) = VLC_OP_END;
  175. }
  176. break;
  177. case VG_LITE_S16:
  178. if (path_data && (*(char*)((short*)path_data + num - 1) == VLC_OP_CLOSE))
  179. {
  180. *(char*)((short*)path_data + num - 1) = VLC_OP_END;
  181. }
  182. break;
  183. case VG_LITE_S32:
  184. if (path_data && (*(char*)((int*)path_data + num - 1) == VLC_OP_CLOSE))
  185. {
  186. *(char*)((int*)path_data + num - 1) = VLC_OP_END;
  187. }
  188. break;
  189. case VG_LITE_FP32:
  190. if (path_data && (*(char*)((float*)path_data + num - 1) == VLC_OP_CLOSE))
  191. {
  192. *(char*)((float*)path_data + num - 1) = VLC_OP_END;
  193. }
  194. break;
  195. default:
  196. break;
  197. }
  198. path->path_length = path_length;
  199. path->path = path_data;
  200. path->path_changed = 1;
  201. path->uploaded.address = 0;
  202. path->uploaded.bytes = 0;
  203. path->uploaded.handle = NULL;
  204. path->uploaded.memory = NULL;
  205. path->pdata_internal = 0;
  206. s_context.path_lastX = 0;
  207. s_context.path_lastY = 0;
  208. /* Default FILL path type*/
  209. path->path_type = VG_LITE_DRAW_FILL_PATH;
  210. return VG_LITE_SUCCESS;
  211. }
  212. vg_lite_error_t vg_lite_set_path_type(vg_lite_path_t* path, vg_lite_path_type_t path_type)
  213. {
  214. if (!path ||
  215. (path_type != VG_LITE_DRAW_FILL_PATH &&
  216. path_type != VG_LITE_DRAW_STROKE_PATH &&
  217. path_type != VG_LITE_DRAW_FILL_STROKE_PATH)
  218. )
  219. return VG_LITE_INVALID_ARGUMENT;
  220. path->path_type = path_type;
  221. return VG_LITE_SUCCESS;
  222. }
/* Release every resource attached to a path: the uploaded GPU copy, the
 * (optionally internally-owned) CPU path data, the generated stroke path,
 * and all stroke bookkeeping lists.
 *
 * NOTE(review): path is dereferenced without a NULL check — callers must
 * pass a valid path object.
 * Returns the kernel error if freeing GPU memory fails, else VG_LITE_SUCCESS. */
vg_lite_error_t vg_lite_clear_path(vg_lite_path_t* path)
{
    vg_lite_error_t error;
    /* Free the uploaded GPU copy of the path, if any. */
    if (path->uploaded.handle != NULL) {
        vg_lite_kernel_free_t free_cmd;
        free_cmd.memory_handle = path->uploaded.handle;
        error = vg_lite_kernel(VG_LITE_FREE, &free_cmd);
        if (error != VG_LITE_SUCCESS)
            return error;
    }
#if (CHIPID==0x355)
    /* GC355 keeps a separately uploaded stroke copy; free that one too. */
    if (path->stroke && path->stroke->uploaded.handle != NULL) {
        vg_lite_kernel_free_t free_cmd;
        free_cmd.memory_handle = path->stroke->uploaded.handle;
        error = vg_lite_kernel(VG_LITE_FREE, &free_cmd);
        if (error != VG_LITE_SUCCESS)
            return error;
    }
#endif
    /* Clear the upload record so the path reads as "not uploaded". */
    path->uploaded.address = 0;
    path->uploaded.bytes = 0;
    path->uploaded.handle = NULL;
    path->uploaded.memory = NULL;
#if (CHIPID==0x355)
    if (path->stroke) {
        path->stroke->uploaded.address = 0;
        path->stroke->uploaded.bytes = 0;
        path->stroke->uploaded.handle = NULL;
        path->stroke->uploaded.memory = NULL;
    }
#endif
    /* Only free path data the library itself allocated (pdata_internal == 1). */
    if (path->pdata_internal == 1 && path->path != NULL) {
        vg_lite_os_free(path->path);
    }
    path->path = NULL;
    if (path->stroke_path) {
        vg_lite_os_free(path->stroke_path);
        path->stroke_path = NULL;
    }
#if gcFEATURE_VG_STROKE_PATH
    if (path->stroke) {
        /* Walk the divided-path list; each node owns a list of points. */
        if (path->stroke->path_list_divide) {
            vg_lite_path_list_ptr cur_list;
            while (path->stroke->path_list_divide) {
                cur_list = path->stroke->path_list_divide->next;
                if (path->stroke->path_list_divide->path_points) {
                    vg_lite_path_point_ptr temp_point;
                    /* Free the point list node by node. */
                    while (path->stroke->path_list_divide->path_points) {
                        temp_point = path->stroke->path_list_divide->path_points->next;
                        vg_lite_os_free(path->stroke->path_list_divide->path_points);
                        path->stroke->path_list_divide->path_points = temp_point;
                    }
                    temp_point = NULL;
                }
                vg_lite_os_free(path->stroke->path_list_divide);
                path->stroke->path_list_divide = cur_list;
            }
            cur_list = 0;
        }
        /* Same pattern for the generated stroke sub-paths and their points. */
        if (path->stroke->stroke_paths) {
            vg_lite_sub_path_ptr temp_sub_path;
            while (path->stroke->stroke_paths) {
                temp_sub_path = path->stroke->stroke_paths->next;
                if (path->stroke->stroke_paths->point_list) {
                    vg_lite_path_point_ptr temp_point;
                    while (path->stroke->stroke_paths->point_list) {
                        temp_point = path->stroke->stroke_paths->point_list->next;
                        vg_lite_os_free(path->stroke->stroke_paths->point_list);
                        path->stroke->stroke_paths->point_list = temp_point;
                    }
                    temp_point = NULL;
                }
                vg_lite_os_free(path->stroke->stroke_paths);
                path->stroke->stroke_paths = temp_sub_path;
            }
            temp_sub_path = NULL;
        }
        if (path->stroke->dash_pattern)
            vg_lite_os_free(path->stroke->dash_pattern);
        /* Finally release the stroke container and reset stroke state. */
        vg_lite_os_free(path->stroke);
        path->stroke = NULL;
        path->stroke_valid = 0;
        path->stroke_size = 0;
    }
#endif
    return VG_LITE_SUCCESS;
}
/* Upload a path's data into GPU memory, wrapped as a self-contained command
 * stream (DATA prefix + path bytes + RETURN postfix) so the HW can call it.
 *
 * On success the path's `uploaded` record points at the GPU buffer and
 * path->path is redirected to the GPU-visible copy. The caller releases it
 * later via vg_lite_clear_path().
 * NOTE(review): path is dereferenced without a NULL check. */
vg_lite_error_t vg_lite_upload_path(vg_lite_path_t * path)
{
#if DUMP_API
    FUNC_DUMP(vg_lite_upload_path)(path);
#endif
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t bytes;
    vg_lite_buffer_t Buf, *buffer;
    buffer = &Buf;
    /* Compute the number of bytes required for path + command buffer prefix/postfix. */
    /* 8-byte DATA header + path data rounded up + 8-byte RETURN trailer,
       total kept 8-byte aligned by the & ~7. */
    bytes = (8 + path->path_length + 7 + 8) & ~7;
    /* Allocate GPU memory. */
    /* A8, height 1: the buffer is used as a raw byte span, not an image. */
    buffer->width = bytes;
    buffer->height = 1;
    buffer->stride = 0;
    buffer->format = VG_LITE_A8;
    VG_LITE_RETURN_ERROR(vg_lite_allocate(buffer));
    /* Initialize command buffer prefix. */
    /* DATA operand counts 8-byte units, hence (length + 7) / 8. */
    ((uint32_t *) buffer->memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
    ((uint32_t *) buffer->memory)[1] = 0;
    /* Copy the path data. */
    /* +2 words skips the 8-byte DATA header written above. */
    memcpy((uint32_t *) buffer->memory + 2, path->path, path->path_length);
    /* Initialize command buffer postfix. */
    ((uint32_t *) buffer->memory)[(bytes >> 2) - 2] = VG_LITE_RETURN();
    ((uint32_t *) buffer->memory)[(bytes >> 2) - 1] = 0;
    /* Mark path as uploaded. */
    path->path = buffer->memory;
    path->uploaded.handle = buffer->handle;
    path->uploaded.address = buffer->address;
    path->uploaded.memory = buffer->memory;
    path->uploaded.bytes = bytes;
    path->path_changed = 0;
    VLM_PATH_ENABLE_UPLOAD(*path); /* Implicitly enable path uploading. */
    /* Return pointer to vg_lite_buffer structure. */
    return error;
}
  346. vg_lite_uint32_t vg_lite_get_path_length(vg_lite_uint8_t *cmd, vg_lite_uint32_t count, vg_lite_format_t format)
  347. {
  348. uint32_t size = 0;
  349. int32_t dCount = 0;
  350. uint32_t i = 0;
  351. int32_t data_size = 0;
  352. data_size = get_data_size(format);
  353. for (i = 0; i < count; i++) {
  354. size++; /* OP CODE. */
  355. dCount = get_data_count(cmd[i]);
  356. size = CDALIGN(size, data_size);
  357. size += dCount * data_size;
  358. }
  359. if (cmd[count - 1] != VLC_OP_END || cmd[count - 1] != VLC_OP_CLOSE) {
  360. size++;
  361. size = CDALIGN(size, data_size);
  362. }
  363. return size;
  364. }
/* Append opcode/coordinate data to `path`, converting it into the packed
 * byte layout the hardware consumes (opcode byte, then operands aligned to
 * the coordinate size), while tracking the path bounding box on the fly.
 * Allocates path->path when the caller passed a NULL storage pointer.
 *
 * @param path      Destination path; format/quality must already be set.
 * @param cmd       Array of seg_count VLC_OP_* opcodes.
 * @param data      Operand stream matching path->format.
 * @param seg_count Number of opcodes in cmd.
 * @return VG_LITE_SUCCESS, VG_LITE_INVALID_ARGUMENT on bad args/opcodes,
 *         VG_LITE_OUT_OF_RESOURCES on allocation failure.
 */
vg_lite_error_t vg_lite_append_path(vg_lite_path_t *path,
                                    vg_lite_uint8_t *cmd,
                                    vg_lite_pointer data,
                                    vg_lite_uint32_t seg_count)
{
    vg_lite_error_t error = VG_LITE_SUCCESS;
    uint32_t i;
    int32_t j;
    int32_t offset = 0;                 /* Write cursor into path->path (bytes). */
    int32_t dataCount = 0;              /* Operand count of the current opcode. */
    /* One read cursor per supported coordinate format, all aliasing `data`. */
    float *dataf = (float*) data;
    float *pathf = NULL;
    int32_t *data_s32 = (int32_t*) data;
    int32_t *path_s32 = NULL;
    int16_t *data_s16 = (int16_t*) data;
    int16_t *path_s16 = NULL;
    int8_t *data_s8 = (int8_t*) data;
    int8_t *path_s8 = NULL;
    uint8_t *pathc = NULL;              /* Byte view of the destination buffer. */
    int32_t data_size;
    /* Flags: path contains ops the GC355 pipeline needs pre-converted. */
    uint8_t arc_path = 0;
    uint8_t h_v_path = 0;
    uint8_t smooth_path = 0;
    /* (px, py) = previous pen position, (cx, cy) = current one. */
    float px = 0.0f, py = 0.0f, cx = 0.0f, cy = 0.0f;
    int rel = 0;                        /* 1 when the current opcode is relative. */

    if (cmd == NULL || data == NULL || path == NULL)
        return VG_LITE_INVALID_ARGUMENT;

    /* Reject unknown opcodes up front. */
    for(i = 0; i < seg_count; i++) {
        if (cmd[i] > VLC_OP_LCWARC_REL)
            return VG_LITE_INVALID_ARGUMENT;
    }
    /* Support NULL path->path case for OpenVG */
    if (!path->path) {
        data_size = vg_lite_get_path_length(cmd, seg_count, path->format);
        path->path = (vg_lite_pointer)vg_lite_os_malloc(data_size);
        if (!path->path)
        {
            return VG_LITE_OUT_OF_RESOURCES;
        }
        path->pdata_internal = 1;       /* We own this buffer and must free it. */
        memset(path->path, 0, data_size);
    }
    data_size = get_data_size(path->format);
    path->path_changed= 1;
    pathf = (float *)path->path;
    path_s32 = (int32_t *)path->path;
    path_s16 = (int16_t *)path->path;
    path_s8 = (int8_t *)path->path;
    pathc = (uint8_t *)path->path;
    /* Set bounding box if the first opcode is VLC_OP_MOVE_* */
    if ((cmd[0] & 0xfe) == VLC_OP_MOVE) {
        switch (path->format)
        {
        case VG_LITE_S8:
            cx = (float)data_s8[0];
            cy = (float)data_s8[1];
            break;
        case VG_LITE_S16:
            cx = (float)data_s16[0];
            cy = (float)data_s16[1];
            break;
        case VG_LITE_S32:
            cx = (float)data_s32[0];
            cy = (float)data_s32[1];
            break;
        case VG_LITE_FP32:
            cx = (float)dataf[0];
            cy = (float)dataf[1];
            break;
        }
        /* Seed the box as a degenerate rect at the first move-to point. */
        path->bounding_box[0] = path->bounding_box[2] = cx;
        path->bounding_box[1] = path->bounding_box[3] = cy;
    }
    /* Loop to fill path data. */
    for (i = 0; i < seg_count; i++) {
#if (CHIPID == 0x355)
        /* GC355: drop a CLOSE that is immediately followed by a MOVE.
           NOTE(review): reads cmd[i + 1]; when i == seg_count - 1 this
           indexes one past the caller's array — confirm callers always
           terminate with END/CLOSE or tolerate the over-read. */
        if (cmd[i] == VLC_OP_CLOSE && (cmd[i + 1] == VLC_OP_MOVE || cmd[i + 1] == VLC_OP_MOVE_REL)) {
            continue;
        }
        else
#endif
        {
            *(pathc + offset) = cmd[i]; /* Emit the opcode byte. */
        }
        offset++;
        dataCount = get_data_count(cmd[i]);
        /* compute the bounding_box. */
        if (dataCount >= 0) {
            offset = CDALIGN(offset, data_size);
            /* Relative opcodes: odd codes between CLOSE and HLINE,
               even codes from HLINE upward. */
            if ((cmd[i] > VLC_OP_CLOSE) &&
                (cmd[i] < VLC_OP_HLINE) &&
                ((cmd[i] & 0x01) == 1)) {
                rel = 1;
            }
            else if ((cmd[i] >= VLC_OP_HLINE) &&
                     ((cmd[i] & 0x01) == 0)) {
                rel = 1;
            }
            else {
                rel = 0;
            }
            if (cmd[i] >= VLC_OP_HLINE && cmd[i] <= VLC_OP_VLINE_REL) {
                /* Horizontal/vertical line: a single operand is consumed.
                   NOTE(review): only index [0] is written below, yet [1] is
                   read for the bound update — the second value is whatever
                   already resides in the destination buffer (zeroed only
                   when this function allocated it). Verify intent. */
                switch (path->format) {
                case VG_LITE_S8:
                    path_s8 = (int8_t*)(pathc + offset);
                    path_s8[0] = *data_s8;
                    data_s8++;
                    if (rel) {
                        cx = px + (float)path_s8[0];
                        cy = py + (float)path_s8[1];
                    }
                    else {
                        cx = (float)path_s8[0];
                        cy = (float)path_s8[1];
                    }
                    break;
                case VG_LITE_S16:
                    path_s16 = (int16_t*)(pathc + offset);
                    path_s16[0] = *data_s16;
                    data_s16++;
                    if (rel) {
                        cx = px + (float)path_s16[0];
                        cy = py + (float)path_s16[1];
                    }
                    else {
                        cx = (float)path_s16[0];
                        cy = (float)path_s16[1];
                    }
                    break;
                case VG_LITE_S32:
                    path_s32 = (int32_t*)(pathc + offset);
                    path_s32[0] = *data_s32;
                    data_s32++;
                    if (rel) {
                        cx = px + (float)path_s32[0];
                        cy = py + (float)path_s32[1];
                    }
                    else {
                        cx = (float)path_s32[0];
                        cy = (float)path_s32[1];
                    }
                    break;
                case VG_LITE_FP32:
                    pathf = (float*)(pathc + offset);
                    pathf[0] = *dataf;
                    dataf++;
                    if (rel) {
                        cx = px + (float)pathf[0];
                        cy = py + (float)pathf[1];
                    }
                    else {
                        cx = (float)pathf[0];
                        cy = (float)pathf[1];
                    }
                    break;
                }
                h_v_path = 1;
                /* Update path bounds. */
                path->bounding_box[0] = CDMIN(path->bounding_box[0], cx);
                path->bounding_box[2] = CDMAX(path->bounding_box[2], cx);
                path->bounding_box[1] = CDMIN(path->bounding_box[1], cy);
                path->bounding_box[3] = CDMAX(path->bounding_box[3], cy);
            }
            else if (cmd[i] < VLC_OP_SCCWARC) {
                /* Move/line/curve opcodes: operands come in (x, y) pairs. */
                /* Mark smooth path,convert it in next step. */
                if (cmd[i] <= VLC_OP_SCUBIC_REL && cmd[i] >= VLC_OP_SQUAD) {
                    smooth_path = 1;
                }
                for (j = 0; j < dataCount / 2; j++) {
                    switch (path->format) {
                    case VG_LITE_S8:
                        path_s8 = (int8_t *)(pathc + offset);
                        path_s8[j * 2] = *data_s8;
                        data_s8++;
                        path_s8[j * 2 + 1] = *data_s8;
                        data_s8++;
                        if (rel) {
                            cx = px + path_s8[j * 2];
                            cy = py + path_s8[j * 2 + 1];
                        }
                        else {
                            cx = path_s8[j * 2];
                            cy = path_s8[j * 2 + 1];
                        }
                        break;
                    case VG_LITE_S16:
                        path_s16 = (int16_t *)(pathc + offset);
                        path_s16[j * 2] = *data_s16;
                        data_s16++;
                        path_s16[j * 2 + 1] = *data_s16;
                        data_s16++;
                        if (rel) {
                            cx = px + path_s16[j * 2];
                            cy = py + path_s16[j * 2 + 1];
                        }
                        else {
                            cx = path_s16[j * 2];
                            cy = path_s16[j * 2 + 1];
                        }
                        break;
                    case VG_LITE_S32:
                        path_s32 = (int32_t *)(pathc + offset);
                        path_s32[j * 2] = *data_s32;
                        data_s32++;
                        path_s32[j * 2 + 1] = *data_s32;
                        data_s32++;
                        if (rel) {
                            cx = px + path_s32[j * 2];
                            cy = py + path_s32[j * 2 + 1];
                        }
                        else {
                            cx = (float)path_s32[j * 2];
                            cy = (float)path_s32[j * 2 + 1];
                        }
                        break;
                    case VG_LITE_FP32:
                        pathf = (float *)(pathc + offset);
                        pathf[j * 2] = *dataf;
                        dataf++;
                        pathf[j * 2 + 1] = *dataf;
                        dataf++;
                        if (rel) {
                            cx = px + pathf[j * 2];
                            cy = py + pathf[j * 2 + 1];
                        }
                        else {
                            cx = pathf[j * 2];
                            cy = pathf[j * 2 + 1];
                        }
                        break;
                    default:
                        return VG_LITE_INVALID_ARGUMENT;
                    }
                    /* Only move/line endpoints grow the box here; curve
                       control points are handled by the arc-path converter. */
                    if (cmd[i] <= VLC_OP_LINE_REL && cmd[i] >= VLC_OP_MOVE) {
                        /* Update move to and line path bounds. */
                        path->bounding_box[0] = CDMIN(path->bounding_box[0], cx);
                        path->bounding_box[2] = CDMAX(path->bounding_box[2], cx);
                        path->bounding_box[1] = CDMIN(path->bounding_box[1], cy);
                        path->bounding_box[3] = CDMAX(path->bounding_box[3], cy);
                    }
                }
            }
#if gcFEATURE_VG_ARC_PATH
            else {
                /* Arc opcodes: 5 operands (rh, rv, rot, x, y).
                   The box is padded by +/- 2*radius around both endpoints.
                   NOTE(review): the second/fourth compute_pathbounds calls
                   use index [1] for both axes (px + 2*[1], py + 2*[1]) while
                   the first/third use [0],[1] — looks like a copy-paste slip;
                   confirm against the intended rh/rv padding. */
                arc_path = 1;
                switch (path->format) {
                case VG_LITE_S8:
                    path_s8 = (int8_t*)(pathc + offset);
                    path_s8[0] = *data_s8;
                    data_s8++;
                    path_s8[1] = *data_s8;
                    data_s8++;
                    path_s8[2] = *data_s8;
                    data_s8++;
                    path_s8[3] = *data_s8;
                    data_s8++;
                    path_s8[4] = *data_s8;
                    data_s8++;
                    if (rel) {
                        cx = px + path_s8[3];
                        cy = py + path_s8[4];
                    }
                    else {
                        cx = path_s8[3];
                        cy = path_s8[4];
                    }
                    /* Update path bounds. */
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx + 2 * path_s8[0],cy + 2 * path_s8[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px + 2 * path_s8[1],py + 2 * path_s8[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx - 2 * path_s8[0],cy - 2 * path_s8[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px - 2 * path_s8[1],py - 2 * path_s8[1]);
                    break;
                case VG_LITE_S16:
                    path_s16 = (int16_t*)(pathc + offset);
                    path_s16[0] = *data_s16;
                    data_s16++;
                    path_s16[1] = *data_s16;
                    data_s16++;
                    path_s16[2] = *data_s16;
                    data_s16++;
                    path_s16[3] = *data_s16;
                    data_s16++;
                    path_s16[4] = *data_s16;
                    data_s16++;
                    if (rel) {
                        cx = px + path_s16[3];
                        cy = py + path_s16[4];
                    }
                    else {
                        cx = path_s16[3];
                        cy = path_s16[4];
                    }
                    /* Update path bounds. */
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx + 2 * path_s16[0],cy + 2 * path_s16[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px + 2 * path_s16[1],py + 2 * path_s16[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx - 2 * path_s16[0],cy - 2 * path_s16[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px - 2 * path_s16[1],py - 2 * path_s16[1]);
                    break;
                case VG_LITE_S32:
                    path_s32 = (int32_t*)(pathc + offset);
                    path_s32[0] = *data_s32;
                    data_s32++;
                    path_s32[1] = *data_s32;
                    data_s32++;
                    path_s32[2] = *data_s32;
                    data_s32++;
                    path_s32[3] = *data_s32;
                    data_s32++;
                    path_s32[4] = *data_s32;
                    data_s32++;
                    if (rel) {
                        cx = px + path_s32[3];
                        cy = py + path_s32[4];
                    }
                    else {
                        cx = (float)path_s32[3];
                        cy = (float)path_s32[4];
                    }
                    /* Update path bounds. */
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx + 2 * path_s32[0],cy + 2 * path_s32[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px + 2 * path_s32[1],py + 2 * path_s32[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx - 2 * path_s32[0],cy - 2 * path_s32[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px - 2 * path_s32[1],py - 2 * path_s32[1]);
                    break;
                case VG_LITE_FP32:
                    pathf = (float*)(pathc + offset);
                    pathf[0] = *dataf;
                    dataf++;
                    pathf[1] = *dataf;
                    dataf++;
                    pathf[2] = *dataf;
                    dataf++;
                    pathf[3] = *dataf;
                    dataf++;
                    pathf[4] = *dataf;
                    dataf++;
                    if (rel) {
                        cx = px + pathf[3];
                        cy = py + pathf[4];
                    }
                    else {
                        cx = pathf[3];
                        cy = pathf[4];
                    }
                    /* Update path bounds. */
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx + 2 * pathf[0],cy + 2 * pathf[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px + 2 * pathf[1],py + 2 * pathf[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],cx - 2 * pathf[0],cy - 2 * pathf[1]);
                    compute_pathbounds(&path->bounding_box[0], &path->bounding_box[1], &path->bounding_box[2], &path->bounding_box[3],px - 2 * pathf[1],py - 2 * pathf[1]);
                    break;
                }
            }
#endif
            /* Advance the pen and the write cursor past this opcode's operands. */
            px = cx;
            py = cy;
            offset += dataCount * data_size;
        }
    }
    /* Terminate the path: no extra opcode needed if it already ends with END
       (or with CLOSE when the arc-path converter will rewrite it anyway). */
    if (cmd[seg_count - 1] == VLC_OP_END
#if gcFEATURE_VG_ARC_PATH
        || (cmd[seg_count - 1] == VLC_OP_CLOSE && (arc_path | h_v_path | smooth_path))
#endif
        ) {
        path->path_length = offset;
    }
    else {
        path->path_length = offset + data_size;
        path->add_end = 1;
        ((uint8_t*)(path->path))[offset] = 0; /* Append VLC_OP_END (0). */
    }
#if gcFEATURE_VG_ARC_PATH
    /* Arc/H-V/smooth opcodes are not consumed natively; convert them. */
    if (arc_path | h_v_path | smooth_path) {
        error = vg_lite_init_arc_path(path,
                                      path->format,
                                      path->quality,
                                      path->path_length,
                                      path->path,
                                      path->bounding_box[0], path->bounding_box[1],
                                      path->bounding_box[2], path->bounding_box[3]);
    }
#endif
    /* Remember the final pen position for subsequent appends. */
    s_context.path_lastX = cx;
    s_context.path_lastY = cy;
    return error;
}
#if (CHIPID==0x355 || CHIPID==0x255) /* GC355/GC255 vg_lite_draw functions */
/* Grow rectangle `bbx` ({x, y, width, height}) in place so that it contains
 * `point`: first extend left/up (shifting the origin and enlarging the
 * extent), then extend right/down. Both arguments are evaluated several
 * times — do not pass expressions with side effects. */
#define UPDATE_BOUNDING_BOX(bbx, point) \
do { \
    if ((point).x < (bbx).x) { \
        (bbx).width += (bbx).x - (point).x; \
        (bbx).x = (point).x; \
    } \
    if ((point).y < (bbx).y) { \
        (bbx).height += (bbx).y - (point).y; \
        (bbx).y = (point).y; \
    } \
    if ((point).x > (bbx).x + (bbx).width) \
        (bbx).width = (point).x - (bbx).x; \
    if ((point).y > (bbx).y + (bbx).height) \
        (bbx).height = (point).y - (bbx).y; \
} while(0)
  766. static vg_lite_error_t transform_bounding_box(vg_lite_rectangle_t *in_bbx,
  767. vg_lite_matrix_t *matrix,
  768. vg_lite_rectangle_t *clip,
  769. vg_lite_rectangle_t *out_bbx,
  770. vg_lite_point_t *origin)
  771. {
  772. vg_lite_point_t temp;
  773. memset(out_bbx, 0, sizeof(vg_lite_rectangle_t));
  774. /* Transform image point (0, 0). */
  775. if (!transform(&temp, 0.0f, 0.0f, matrix))
  776. return VG_LITE_INVALID_ARGUMENT;
  777. out_bbx->x = temp.x;
  778. out_bbx->y = temp.y;
  779. /* Provide position of the new origin to the caller if requested. */
  780. if (origin != NULL) {
  781. origin->x = temp.x;
  782. origin->y = temp.y;
  783. }
  784. /* Transform image point (0, height). */
  785. if (!transform(&temp, 0.0f, (vg_lite_float_t)in_bbx->height, matrix))
  786. return VG_LITE_INVALID_ARGUMENT;
  787. UPDATE_BOUNDING_BOX(*out_bbx, temp);
  788. /* Transform image point (width, height). */
  789. if (!transform(&temp, (vg_lite_float_t)in_bbx->width, (vg_lite_float_t)in_bbx->height, matrix))
  790. return VG_LITE_INVALID_ARGUMENT;
  791. UPDATE_BOUNDING_BOX(*out_bbx, temp);
  792. /* Transform image point (width, 0). */
  793. if (!transform(&temp, (vg_lite_float_t)in_bbx->width, 0.0f, matrix))
  794. return VG_LITE_INVALID_ARGUMENT;
  795. UPDATE_BOUNDING_BOX(*out_bbx, temp);
  796. /* Clip is required */
  797. if (clip) {
  798. out_bbx->x = MAX(out_bbx->x, clip->x);
  799. out_bbx->y = MAX(out_bbx->y, clip->y);
  800. out_bbx->width = MIN((out_bbx->x + out_bbx->width), (clip->x + clip->width)) - out_bbx->x;
  801. out_bbx->height = MIN((out_bbx->y + out_bbx->height), (clip->y + clip->height)) - out_bbx->y;
  802. }
  803. return VG_LITE_SUCCESS;
  804. }
  805. static vg_lite_error_t set_interpolation_steps(vg_lite_buffer_t *target,
  806. vg_lite_int32_t s_width,
  807. vg_lite_int32_t s_height,
  808. vg_lite_matrix_t *matrix)
  809. {
  810. vg_lite_matrix_t im;
  811. vg_lite_rectangle_t src_bbx, bounding_box, clip;
  812. vg_lite_float_t xs[3], ys[3], cs[3];
  813. vg_lite_error_t error = VG_LITE_SUCCESS;
  814. float dx = 0.0f, dy = 0.0f;
  815. #define ERR_LIMIT 0.0000610351562f
  816. /* Get bounding box. */
  817. memset(&src_bbx, 0, sizeof(vg_lite_rectangle_t));
  818. memset(&clip, 0, sizeof(vg_lite_rectangle_t));
  819. src_bbx.width = (int32_t)s_width;
  820. src_bbx.height = (int32_t)s_height;
  821. if (s_context.scissor_set) {
  822. clip.x = s_context.scissor[0];
  823. clip.y = s_context.scissor[1];
  824. clip.width = s_context.scissor[2];
  825. clip.height = s_context.scissor[3];
  826. } else {
  827. clip.x = clip.y = 0;
  828. clip.width = s_context.rtbuffer->width;
  829. clip.height = s_context.rtbuffer->height;
  830. }
  831. transform_bounding_box(&src_bbx, matrix, &clip, &bounding_box, NULL);
  832. /* Compute inverse matrix. */
  833. if (!inverse(&im, matrix))
  834. return VG_LITE_INVALID_ARGUMENT;
  835. /* Compute interpolation steps. */
  836. /* X step */
  837. xs[0] = im.m[0][0] / s_width;
  838. xs[1] = im.m[1][0] / s_height;
  839. xs[2] = im.m[2][0];
  840. /* Y step */
  841. ys[0] = im.m[0][1] / s_width;
  842. ys[1] = im.m[1][1] / s_height;
  843. ys[2] = im.m[2][1];
  844. /* C step 2 */
  845. cs[2] = 0.5f * (im.m[2][0] + im.m[2][1]) + im.m[2][2];
  846. /* C step 0, 1*/
  847. cs[0] = (0.5f * (im.m[0][0] + im.m[0][1]) + im.m[0][2] + dx) / s_width;
  848. cs[1] = (0.5f * (im.m[1][0] + im.m[1][1]) + im.m[1][2] + dy) / s_height;
  849. /* Set command buffer */
  850. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *)&cs[0]));
  851. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *)&cs[1]));
  852. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *)&cs[2]));
  853. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *)&xs[0]));
  854. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *)&xs[1]));
  855. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *)&xs[2]));
  856. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  857. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *)&ys[0]));
  858. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *)&ys[1]));
  859. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *)&ys[2]));
  860. return VG_LITE_SUCCESS;
  861. }
  862. static vg_lite_error_t set_interpolation_steps_draw_paint(vg_lite_buffer_t* target,
  863. vg_lite_int32_t s_width,
  864. vg_lite_int32_t s_height,
  865. vg_lite_matrix_t* matrix)
  866. {
  867. vg_lite_matrix_t im;
  868. vg_lite_rectangle_t src_bbx, bounding_box, clip;
  869. vg_lite_float_t xs[3], ys[3], cs[3];
  870. vg_lite_error_t error = VG_LITE_SUCCESS;
  871. float dx = 0.0f, dy = 0.0f;
  872. #define ERR_LIMIT 0.0000610351562f
  873. /* Get bounding box. */
  874. memset(&src_bbx, 0, sizeof(vg_lite_rectangle_t));
  875. memset(&clip, 0, sizeof(vg_lite_rectangle_t));
  876. src_bbx.width = (int32_t)s_width;
  877. src_bbx.height = (int32_t)s_height;
  878. if (s_context.scissor_set) {
  879. clip.x = s_context.scissor[0];
  880. clip.y = s_context.scissor[1];
  881. clip.width = s_context.scissor[2];
  882. clip.height = s_context.scissor[3];
  883. }
  884. else {
  885. clip.x = clip.y = 0;
  886. clip.width = s_context.rtbuffer->width;
  887. clip.height = s_context.rtbuffer->height;
  888. }
  889. transform_bounding_box(&src_bbx, matrix, &clip, &bounding_box, NULL);
  890. /* Compute inverse matrix. */
  891. if (!inverse(&im, matrix))
  892. return VG_LITE_INVALID_ARGUMENT;
  893. /* Compute interpolation steps. */
  894. /* X step */
  895. xs[0] = im.m[0][0] / s_width;
  896. xs[1] = im.m[1][0] / s_height;
  897. xs[2] = im.m[2][0];
  898. /* Y step */
  899. ys[0] = im.m[0][1] / s_width;
  900. ys[1] = im.m[1][1] / s_height;
  901. ys[2] = im.m[2][1];
  902. /* C step 2 */
  903. cs[2] = 0.5f * (im.m[2][0] + im.m[2][1]) + im.m[2][2];
  904. /* C step 0, 1*/
  905. cs[0] = (0.5f * (im.m[0][0] + im.m[0][1]) + im.m[0][2] + dx) / s_width;
  906. cs[1] = (0.5f * (im.m[1][0] + im.m[1][1]) + im.m[1][2] + dy) / s_height;
  907. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A04, (void*)&cs[0]));
  908. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A05, (void*)&cs[1]));
  909. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A06, (void*)&xs[0]));
  910. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A07, (void*)&xs[1]));
  911. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A08, (void*)&ys[0]));
  912. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A09, (void*)&ys[1]));
  913. /* Set command buffer */
  914. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void*)&cs[0]));
  915. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void*)&cs[1]));
  916. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void*)&cs[2]));
  917. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void*)&xs[0]));
  918. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void*)&xs[1]));
  919. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void*)&xs[2]));
  920. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  921. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void*)&ys[0]));
  922. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void*)&ys[1]));
  923. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void*)&ys[2]));
  924. return VG_LITE_SUCCESS;
  925. }
  926. /* GC355/GC255 vg_lite_draw API implementation
  927. */
  928. vg_lite_error_t vg_lite_draw(vg_lite_buffer_t *target,
  929. vg_lite_path_t *path,
  930. vg_lite_fill_t fill_rule,
  931. vg_lite_matrix_t * matrix,
  932. vg_lite_blend_t blend,
  933. vg_lite_color_t color)
  934. {
  935. uint32_t blend_mode;
  936. uint32_t format, quality, tiling, fill;
  937. uint32_t tessellation_size;
  938. vg_lite_error_t error;
  939. int32_t dst_align_width;
  940. uint32_t mul, div, align;
  941. vg_lite_point_t point_min = {0}, point_max = {0}, temp = {0};
  942. int x, y, width, height;
  943. uint8_t ts_is_fullscreen = 0;
  944. uint32_t in_premult = 0;
  945. uint32_t premul_flag = 0;
  946. uint32_t prediv_flag = 0;
  947. #if(CHIPID == 0x355)
  948. uint8_t *path_re = NULL;
  949. uint32_t index = 0;
  950. #endif
  951. #if gcFEATURE_VG_TRACE_API
  952. VGLITE_LOG("vg_lite_draw %p %p %d %p %d 0x%08X\n", target, path, fill_rule, matrix, blend, color);
  953. VGLITE_LOG(" path_type %d, path_length %d, stroke_size %d\n", path->path_type, path->path_length, path->stroke_size);
  954. #endif
  955. #if gcFEATURE_VG_ERROR_CHECK
  956. #if !gcFEATURE_VG_QUALITY_8X
  957. if (path->quality == VG_LITE_UPPER) {
  958. return VG_LITE_NOT_SUPPORT;
  959. }
  960. #endif
  961. if (!path || !path->path) {
  962. return VG_LITE_INVALID_ARGUMENT;
  963. }
  964. #if (CHIPID == 0x355)
  965. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  966. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  967. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  968. printf("Target format: 0x%x is not supported.\n", target->format);
  969. return VG_LITE_NOT_SUPPORT;
  970. }
  971. #endif
  972. #endif /* gcFEATURE_VG_ERROR_CHECK */
  973. if (!path->path_length) {
  974. return VG_LITE_SUCCESS;
  975. }
  976. if (!matrix) {
  977. matrix = &identity_mtx;
  978. }
  979. #if gcFEATURE_VG_GAMMA
  980. set_gamma_dest_only(target, VGL_FALSE);
  981. #endif
  982. /*blend input into context*/
  983. s_context.blend_mode = blend;
  984. /* Adjust premultiply setting according to openvg condition */
  985. target->apply_premult = 0;
  986. premul_flag = (s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE);
  987. if (target->premultiplied == 0 && premul_flag == 0) {
  988. in_premult = 0x10000000;
  989. target->apply_premult = 1;
  990. }
  991. else if ((target->premultiplied == 1) ||
  992. (target->premultiplied == 0 && premul_flag == 1)) {
  993. in_premult = 0x00000000;
  994. }
  995. if (blend == VG_LITE_BLEND_NORMAL_LVGL) {
  996. in_premult = 0x00000000;
  997. }
  998. error = set_render_target(target);
  999. if (error != VG_LITE_SUCCESS) {
  1000. return error;
  1001. } else if (error == VG_LITE_NO_CONTEXT) {
  1002. /* If scissoring is enabled and no valid scissoring rectangles
  1003. are present, no drawing occurs */
  1004. return VG_LITE_SUCCESS;
  1005. }
  1006. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  1007. height = s_context.tessbuf.tess_w_h >> 16;
  1008. get_format_bytes(target->format, &mul, &div, &align);
  1009. dst_align_width = target->stride * div / mul;
  1010. if (width == 0 || height == 0)
  1011. return VG_LITE_NO_CONTEXT;
  1012. if ((dst_align_width <= width) && (target->height <= height))
  1013. {
  1014. ts_is_fullscreen = 1;
  1015. point_min.x = 0;
  1016. point_min.y = 0;
  1017. point_max.x = dst_align_width;
  1018. point_max.y = target->height;
  1019. }
  1020. if (ts_is_fullscreen == 0){
  1021. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  1022. point_min = point_max = temp;
  1023. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  1024. if (temp.x < point_min.x) point_min.x = temp.x;
  1025. if (temp.y < point_min.y) point_min.y = temp.y;
  1026. if (temp.x > point_max.x) point_max.x = temp.x;
  1027. if (temp.y > point_max.y) point_max.y = temp.y;
  1028. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  1029. if (temp.x < point_min.x) point_min.x = temp.x;
  1030. if (temp.y < point_min.y) point_min.y = temp.y;
  1031. if (temp.x > point_max.x) point_max.x = temp.x;
  1032. if (temp.y > point_max.y) point_max.y = temp.y;
  1033. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  1034. if (temp.x < point_min.x) point_min.x = temp.x;
  1035. if (temp.y < point_min.y) point_min.y = temp.y;
  1036. if (temp.x > point_max.x) point_max.x = temp.x;
  1037. if (temp.y > point_max.y) point_max.y = temp.y;
  1038. if (point_min.x < 0) point_min.x = 0;
  1039. if (point_min.y < 0) point_min.y = 0;
  1040. if (point_max.x > dst_align_width) point_max.x = dst_align_width;
  1041. if (point_max.y > target->height) point_max.y = target->height;
  1042. if (s_context.scissor_set) {
  1043. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  1044. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  1045. point_max.x = MIN(point_max.x, s_context.scissor[0] + s_context.scissor[2]);
  1046. point_max.y = MIN(point_max.y, s_context.scissor[1] + s_context.scissor[3]);
  1047. }
  1048. }
  1049. /* Convert states into hardware values. */
  1050. blend_mode = convert_blend(blend);
  1051. format = convert_path_format(path->format);
  1052. quality = convert_path_quality(path->quality);
  1053. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  1054. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  1055. tessellation_size = s_context.tessbuf.L2_size ? s_context.tessbuf.L2_size : s_context.tessbuf.L1_size;
  1056. /* Setup the command buffer. */
  1057. /* Program color register. */
  1058. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | s_context.capabilities.cap.tiled | blend_mode | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  1059. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  1060. /* Program tessellation control: for TS module. */
  1061. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | fill));
  1062. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  1063. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  1064. /* Program matrix. */
  1065. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &matrix->m[0][0]));
  1066. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &matrix->m[0][1]));
  1067. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &matrix->m[0][2]));
  1068. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &matrix->m[1][0]));
  1069. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &matrix->m[1][1]));
  1070. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &matrix->m[1][2]));
  1071. /* Setup tessellation loop. */
  1072. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH)
  1073. {
  1074. for (y = point_min.y; y < point_max.y; y += height) {
  1075. for (x = point_min.x; x < point_max.x; x += width) {
  1076. /* Tessellate path. */
  1077. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1078. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1079. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1080. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1081. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1082. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  1083. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  1084. }
  1085. else {
  1086. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  1087. }
  1088. }
  1089. }
  1090. }
  1091. /* Setup tessellation loop. */
  1092. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  1093. for (y = point_min.y; y < point_max.y; y += height) {
  1094. for (x = point_min.x; x < point_max.x; x += width) {
  1095. /* Tessellate path. */
  1096. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1097. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1098. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1099. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1100. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1101. format = convert_path_format(VG_LITE_FP32);
  1102. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  1103. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  1104. if (VLM_PATH_STROKE_GET_UPLOAD_BIT(*path) == 1) {
  1105. VG_LITE_RETURN_ERROR(push_call(&s_context, path->stroke->uploaded.address, path->stroke->uploaded.bytes));
  1106. }
  1107. else {
  1108. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  1109. }
  1110. }
  1111. }
  1112. }
  1113. /* Finialize command buffer. */
  1114. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  1115. return error;
  1116. }
  1117. /* GC355/GC255 vg_lite_draw_pattern API implementation
  1118. */
  1119. vg_lite_error_t vg_lite_draw_pattern(vg_lite_buffer_t *target,
  1120. vg_lite_path_t *path,
  1121. vg_lite_fill_t fill_rule,
  1122. vg_lite_matrix_t *path_matrix,
  1123. vg_lite_buffer_t *source,
  1124. vg_lite_matrix_t *pattern_matrix,
  1125. vg_lite_blend_t blend,
  1126. vg_lite_pattern_mode_t pattern_mode,
  1127. vg_lite_color_t pattern_color,
  1128. vg_lite_color_t color,
  1129. vg_lite_filter_t filter)
  1130. {
  1131. vg_lite_error_t error = VG_LITE_SUCCESS;
  1132. uint32_t imageMode;
  1133. uint32_t blend_mode;
  1134. uint32_t filter_mode = 0;
  1135. int32_t dst_align_width;
  1136. uint32_t mul, div, align;
  1137. uint32_t conversion = 0;
  1138. uint32_t tiled_source;
  1139. vg_lite_matrix_t matrix;
  1140. uint32_t pattern_tile = 0;
  1141. uint32_t transparency_mode = 0;
  1142. /* The following code is from "draw path" */
  1143. uint32_t format, quality, tiling, fill;
  1144. uint32_t tessellation_size;
  1145. vg_lite_point_t point_min = {0}, point_max = {0}, temp = {0};
  1146. int x, y, width, height;
  1147. uint8_t ts_is_fullscreen = 0;
  1148. uint32_t in_premult = 0;
  1149. uint32_t src_premultiply_enable = 0;
  1150. uint32_t paintType = 0;
  1151. uint32_t premul_flag = 0;
  1152. uint32_t prediv_flag = 0;
  1153. uint8_t lvgl_sw_blend = 0;
  1154. #if(CHIPID == 0X355)
  1155. uint8_t* path_re = NULL;
  1156. uint32_t index = 0;
  1157. #endif
  1158. #if gcFEATURE_VG_TRACE_API
  1159. VGLITE_LOG("vg_lite_draw_pattern %p %p %d %p %p %p %d %d 0x%08X %d\n",
  1160. target, path, fill_rule, path_matrix, source, pattern_matrix, blend, pattern_mode, pattern_color, filter);
  1161. #endif
  1162. #if gcFEATURE_VG_ERROR_CHECK
  1163. #if !gcFEATURE_VG_QUALITY_8X
  1164. if (path->quality == VG_LITE_UPPER) {
  1165. return VG_LITE_NOT_SUPPORT;
  1166. }
  1167. #endif
  1168. if (source->format == VG_LITE_A4 || source->format == VG_LITE_A8) {
  1169. return VG_LITE_NOT_SUPPORT;
  1170. }
  1171. if (!path || !path->path) {
  1172. return VG_LITE_INVALID_ARGUMENT;
  1173. }
  1174. #if (CHIPID == 0x355)
  1175. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  1176. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  1177. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  1178. printf("Target format: 0x%x is not supported.\n", target->format);
  1179. return VG_LITE_NOT_SUPPORT;
  1180. }
  1181. #endif
  1182. #endif /* gcFEATURE_VG_ERROR_CHECK */
  1183. #if !gcFEATURE_VG_LVGL_SUPPORT
  1184. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  1185. if (!source->lvgl_buffer) {
  1186. source->lvgl_buffer = (vg_lite_buffer_t *)vg_lite_os_malloc(sizeof(vg_lite_buffer_t));
  1187. *source->lvgl_buffer = *source;
  1188. source->lvgl_buffer->lvgl_buffer = NULL;
  1189. vg_lite_allocate(source->lvgl_buffer);
  1190. }
  1191. /* Make sure render target is up to date before reading RT. */
  1192. vg_lite_finish();
  1193. setup_lvgl_image(target, source, source->lvgl_buffer, blend);
  1194. blend = VG_LITE_BLEND_SRC_OVER;
  1195. lvgl_sw_blend = 1;
  1196. }
  1197. #endif
  1198. if (!path->path_length) {
  1199. return VG_LITE_SUCCESS;
  1200. }
  1201. if (!path_matrix) {
  1202. path_matrix = &identity_mtx;
  1203. }
  1204. if (!pattern_matrix) {
  1205. pattern_matrix = &identity_mtx;
  1206. }
  1207. /* Work on pattern states. */
  1208. matrix = *pattern_matrix;
  1209. if (source->paintType == VG_LITE_PAINT_PATTERN)
  1210. {
  1211. matrix.m[2][0] = 0;
  1212. matrix.m[2][1] = 0;
  1213. matrix.m[2][2] = 1;
  1214. source->image_mode = VG_LITE_NONE_IMAGE_MODE;
  1215. }
  1216. #if gcFEATURE_VG_GAMMA
  1217. save_st_gamma_src_dest(source, target);
  1218. #endif
  1219. /*blend input into context*/
  1220. s_context.blend_mode = blend;
  1221. in_premult = 0x00000000;
  1222. /* Adjust premultiply setting according to openvg condition */
  1223. src_premultiply_enable = 0x01000100;
  1224. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  1225. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  1226. prediv_flag = 0;
  1227. }
  1228. else {
  1229. prediv_flag = 1;
  1230. }
  1231. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE) {
  1232. premul_flag = 1;
  1233. }
  1234. else {
  1235. premul_flag = 0;
  1236. }
  1237. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  1238. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  1239. src_premultiply_enable = 0x01000100;
  1240. in_premult = 0x10000000;
  1241. }
  1242. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  1243. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  1244. src_premultiply_enable = 0x00000100;
  1245. in_premult = 0x00000000;
  1246. }
  1247. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  1248. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  1249. src_premultiply_enable = 0x01000100;
  1250. in_premult = 0x00000000;
  1251. }
  1252. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  1253. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  1254. src_premultiply_enable = 0x00000100;
  1255. in_premult = 0x00000000;
  1256. }
  1257. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  1258. in_premult = 0x00000000;
  1259. }
  1260. if (blend == VG_LITE_BLEND_NORMAL_LVGL) {
  1261. in_premult = 0x00000000;
  1262. }
  1263. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  1264. target->apply_premult = 1;
  1265. }
  1266. else {
  1267. target->apply_premult = 0;
  1268. }
  1269. error = set_render_target(target);
  1270. if (error != VG_LITE_SUCCESS) {
  1271. return error;
  1272. } else if (error == VG_LITE_NO_CONTEXT) {
  1273. /* If scissoring is enabled and no valid scissoring rectangles
  1274. are present, no drawing occurs */
  1275. return VG_LITE_SUCCESS;
  1276. }
  1277. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  1278. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  1279. height = s_context.tessbuf.tess_w_h >> 16;
  1280. get_format_bytes(target->format, &mul, &div, &align);
  1281. dst_align_width = target->stride * div / mul;
  1282. if (width == 0 || height == 0)
  1283. return VG_LITE_NO_CONTEXT;
  1284. if ((dst_align_width <= width) && (target->height <= height))
  1285. {
  1286. ts_is_fullscreen = 1;
  1287. point_min.x = 0;
  1288. point_min.y = 0;
  1289. point_max.x = dst_align_width;
  1290. point_max.y = target->height;
  1291. }
  1292. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  1293. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  1294. conversion = 0x80000000;
  1295. }
  1296. /* Determine image mode (NORMAL or MULTIPLY) depending on the color. */
  1297. imageMode = (source->image_mode == VG_LITE_NONE_IMAGE_MODE) ? 0 : (source->image_mode == VG_LITE_MULTIPLY_IMAGE_MODE) ? 0x00002000 : 0x00001000;
  1298. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  1299. if (pattern_mode == VG_LITE_PATTERN_COLOR)
  1300. {
  1301. uint8_t a,r,g,b;
  1302. pattern_tile = 0;
  1303. a = pattern_color >> 24;
  1304. r = pattern_color >> 16;
  1305. g = pattern_color >> 8;
  1306. b = pattern_color;
  1307. pattern_color = (a << 24) | (b << 16) | (g << 8) | r;
  1308. }
  1309. else if (pattern_mode == VG_LITE_PATTERN_PAD)
  1310. {
  1311. pattern_tile = 0x1000;
  1312. }
  1313. #if gcFEATURE_VG_IM_REPEAT_REFLECT
  1314. else if (pattern_mode == VG_LITE_PATTERN_REPEAT)
  1315. {
  1316. pattern_tile = 0x2000;
  1317. }
  1318. else if (pattern_mode == VG_LITE_PATTERN_REFLECT)
  1319. {
  1320. pattern_tile = 0x3000;
  1321. }
  1322. #endif
  1323. else
  1324. {
  1325. return VG_LITE_INVALID_ARGUMENT;
  1326. }
  1327. switch (filter) {
  1328. case VG_LITE_FILTER_POINT:
  1329. filter_mode = 0;
  1330. break;
  1331. case VG_LITE_FILTER_LINEAR:
  1332. filter_mode = 0x10000;
  1333. break;
  1334. case VG_LITE_FILTER_BI_LINEAR:
  1335. filter_mode = 0x20000;
  1336. break;
  1337. case VG_LITE_FILTER_GAUSSIAN:
  1338. filter_mode = 0x30000;
  1339. break;
  1340. }
  1341. if (source->paintType == VG_LITE_PAINT_PATTERN)
  1342. {
  1343. VG_LITE_RETURN_ERROR(set_interpolation_steps_draw_paint(target, source->width, source->height, &matrix));
  1344. /* enable pre-multiplied in image unit */
  1345. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) |
  1346. filter_mode | pattern_tile | conversion | src_premultiply_enable));
  1347. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, pattern_color));
  1348. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  1349. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, source->stride | tiled_source));
  1350. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  1351. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width | (source->height << 16)));
  1352. }
  1353. else
  1354. {
  1355. VG_LITE_RETURN_ERROR(set_interpolation_steps(target, source->width, source->height, &matrix));
  1356. /* enable pre-multiplied in image unit */
  1357. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) |
  1358. filter_mode | pattern_tile | conversion | src_premultiply_enable));
  1359. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, pattern_color));
  1360. #if !gcFEATURE_VG_LVGL_SUPPORT
  1361. if (lvgl_sw_blend) {
  1362. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->lvgl_buffer->address));
  1363. }
  1364. else
  1365. #endif
  1366. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->address));
  1367. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source->stride | tiled_source));
  1368. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, 0));
  1369. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, source->width | (source->height << 16)));
  1370. }
  1371. /* Work on path states. */
  1372. matrix = *path_matrix;
  1373. if (ts_is_fullscreen == 0){
  1374. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], &matrix);
  1375. point_min = point_max = temp;
  1376. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], &matrix);
  1377. if (temp.x < point_min.x) point_min.x = temp.x;
  1378. if (temp.y < point_min.y) point_min.y = temp.y;
  1379. if (temp.x > point_max.x) point_max.x = temp.x;
  1380. if (temp.y > point_max.y) point_max.y = temp.y;
  1381. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], &matrix);
  1382. if (temp.x < point_min.x) point_min.x = temp.x;
  1383. if (temp.y < point_min.y) point_min.y = temp.y;
  1384. if (temp.x > point_max.x) point_max.x = temp.x;
  1385. if (temp.y > point_max.y) point_max.y = temp.y;
  1386. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], &matrix);
  1387. if (temp.x < point_min.x) point_min.x = temp.x;
  1388. if (temp.y < point_min.y) point_min.y = temp.y;
  1389. if (temp.x > point_max.x) point_max.x = temp.x;
  1390. if (temp.y > point_max.y) point_max.y = temp.y;
  1391. point_min.x = MAX(point_min.x, 0);
  1392. point_min.y = MAX(point_min.y, 0);
  1393. point_max.x = MIN(point_max.x, dst_align_width);
  1394. point_max.y = MIN(point_max.y, target->height);
  1395. if (s_context.scissor_set) {
  1396. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  1397. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  1398. point_max.x = MIN(point_max.x, s_context.scissor[0] + s_context.scissor[2]);
  1399. point_max.y = MIN(point_max.y, s_context.scissor[1] + s_context.scissor[3]);
  1400. }
  1401. }
  1402. /* Convert states into hardware values. */
  1403. blend_mode = convert_blend(blend);
  1404. format = convert_path_format(path->format);
  1405. quality = convert_path_quality(path->quality);
  1406. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  1407. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  1408. tessellation_size = s_context.tessbuf.L2_size ? s_context.tessbuf.L2_size : s_context.tessbuf.L1_size;
  1409. /* Setup the command buffer. */
  1410. /* Program color register. */
  1411. if (source->paintType == VG_LITE_PAINT_PATTERN) {
  1412. paintType = 1 << 24 | 1 << 25;
  1413. }
  1414. /* enable pre-multiplied from VG to VGPE */
  1415. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x2 | in_premult | paintType | s_context.capabilities.cap.tiled | imageMode | blend_mode | transparency_mode | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  1416. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000400 | format | quality | tiling | fill));
  1417. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  1418. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  1419. /* Program matrix. */
  1420. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &matrix.m[0][0]));
  1421. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &matrix.m[0][1]));
  1422. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &matrix.m[0][2]));
  1423. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &matrix.m[1][0]));
  1424. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &matrix.m[1][1]));
  1425. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &matrix.m[1][2]));
  1426. /* Setup tessellation loop. */
  1427. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH)
  1428. {
  1429. for (y = point_min.y; y < point_max.y; y += height) {
  1430. for (x = point_min.x; x < point_max.x; x += width) {
  1431. /* Tessellate path. */
  1432. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1433. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1434. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1435. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1436. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1437. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  1438. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  1439. }
  1440. else {
  1441. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  1442. }
  1443. }
  1444. }
  1445. }
  1446. /* Setup tessellation loop. */
  1447. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  1448. for (y = point_min.y; y < point_max.y; y += height) {
  1449. for (x = point_min.x; x < point_max.x; x += width) {
  1450. /* Tessellate path. */
  1451. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1452. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1453. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1454. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1455. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1456. format = convert_path_format(VG_LITE_FP32);
  1457. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  1458. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  1459. if (VLM_PATH_STROKE_GET_UPLOAD_BIT(*path) == 1) {
  1460. VG_LITE_RETURN_ERROR(push_call(&s_context, path->stroke->uploaded.address, path->stroke->uploaded.bytes));
  1461. }
  1462. else {
  1463. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  1464. }
  1465. }
  1466. }
  1467. }
  1468. /* Finialize command buffer. */
  1469. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  1470. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)* (source->height));
  1471. return error;
  1472. }
  1473. /* GC355/GC255 vg_lite_draw_linear_grad API implementation
  1474. */
  1475. vg_lite_error_t vg_lite_draw_linear_grad(vg_lite_buffer_t * target,
  1476. vg_lite_path_t * path,
  1477. vg_lite_fill_t fill_rule,
  1478. vg_lite_matrix_t * path_matrix,
  1479. vg_lite_linear_gradient_ext_t *grad,
  1480. vg_lite_color_t paint_color,
  1481. vg_lite_blend_t blend,
  1482. vg_lite_filter_t filter)
  1483. {
  1484. vg_lite_error_t error = VG_LITE_SUCCESS;
  1485. uint32_t image_mode;
  1486. uint32_t blend_mode;
  1487. uint32_t filter_mode = 0;
  1488. uint32_t conversion = 0;
  1489. uint32_t tiled_source;
  1490. int32_t dst_align_width;
  1491. uint32_t mul, div, align;
  1492. vg_lite_matrix_t inverse_matrix;
  1493. vg_lite_buffer_t * source = &grad->image;
  1494. vg_lite_matrix_t * matrix = &grad->matrix;
  1495. uint32_t linear_tile = 0;
  1496. uint32_t transparency_mode = 0;
  1497. uint32_t in_premult = 0;
  1498. uint32_t src_premultiply_enable = 0;
  1499. uint32_t premul_flag = 0;
  1500. uint32_t prediv_flag = 0;
  1501. void *data;
  1502. /* The following code is from "draw path" */
  1503. uint32_t format, quality, tiling, fill;
  1504. uint32_t tessellation_size;
  1505. vg_lite_kernel_allocate_t memory;
  1506. vg_lite_kernel_free_t free_memory;
  1507. uint32_t return_offset = 0;
  1508. vg_lite_point_t point_min = {0}, point_max = {0}, temp = {0};
  1509. int x, y, width, height;
  1510. uint8_t ts_is_fullscreen = 0;
  1511. vg_lite_float_t dx, dy, dxdx_dydy;
  1512. vg_lite_float_t lg_step_x_lin, lg_step_y_lin, lg_constant_lin;
  1513. #if(CHIPID == 0X355)
  1514. uint8_t* path_re = NULL;
  1515. uint32_t index = 0;
  1516. #endif
  1517. #if gcFEATURE_VG_TRACE_API
  1518. VGLITE_LOG("vg_lite_draw_linear_grad %p %p %d %p %p 0x%08X %d %d\n",
  1519. target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  1520. #endif
  1521. #if gcFEATURE_VG_ERROR_CHECK
  1522. #if !gcFEATURE_VG_LVGL_SUPPORT
  1523. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  1524. return VG_LITE_NOT_SUPPORT;
  1525. }
  1526. #endif
  1527. #if !gcFEATURE_VG_QUALITY_8X
  1528. if (path->quality == VG_LITE_UPPER) {
  1529. return VG_LITE_NOT_SUPPORT;
  1530. }
  1531. #endif
  1532. if (source->format == VG_LITE_A4 || source->format == VG_LITE_A8) {
  1533. return VG_LITE_NOT_SUPPORT;
  1534. }
  1535. if (!path || !path->path) {
  1536. return VG_LITE_INVALID_ARGUMENT;
  1537. }
  1538. #if (CHIPID == 0x355)
  1539. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  1540. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  1541. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  1542. printf("Target format: 0x%x is not supported.\n", target->format);
  1543. return VG_LITE_NOT_SUPPORT;
  1544. }
  1545. #endif
  1546. #endif /* gcFEATURE_VG_ERROR_CHECK */
  1547. if (!path_matrix) {
  1548. path_matrix = &identity_mtx;
  1549. }
  1550. #if gcFEATURE_VG_GAMMA
  1551. set_gamma_dest_only(target, VGL_TRUE);
  1552. #endif
  1553. /*blend input into context*/
  1554. s_context.blend_mode = blend;
  1555. src_premultiply_enable = 0x01000100;
  1556. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  1557. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  1558. prediv_flag = 0;
  1559. }
  1560. else {
  1561. prediv_flag = 1;
  1562. }
  1563. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE) {
  1564. premul_flag = 1;
  1565. }
  1566. else {
  1567. premul_flag = 0;
  1568. }
  1569. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  1570. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  1571. src_premultiply_enable = 0x01000100;
  1572. in_premult = 0x10000000;
  1573. }
  1574. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  1575. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  1576. src_premultiply_enable = 0x00000100;
  1577. in_premult = 0x00000000;
  1578. }
  1579. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  1580. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  1581. src_premultiply_enable = 0x01000100;
  1582. in_premult = 0x00000000;
  1583. }
  1584. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  1585. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  1586. src_premultiply_enable = 0x00000100;
  1587. in_premult = 0x00000000;
  1588. }
  1589. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  1590. #if (CHIPID==0x255)
  1591. src_premultiply_enable = 0x00000000;
  1592. #endif
  1593. in_premult = 0x00000000;
  1594. }
  1595. if (blend == VG_LITE_BLEND_NORMAL_LVGL) {
  1596. in_premult = 0x00000000;
  1597. }
  1598. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  1599. target->apply_premult = 1;
  1600. }
  1601. else {
  1602. target->apply_premult = 0;
  1603. }
  1604. error = set_render_target(target);
  1605. if (error != VG_LITE_SUCCESS) {
  1606. return error;
  1607. } else if (error == VG_LITE_NO_CONTEXT) {
  1608. /* If scissoring is enabled and no valid scissoring rectangles
  1609. are present, no drawing occurs */
  1610. return VG_LITE_SUCCESS;
  1611. }
  1612. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  1613. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  1614. height = s_context.tessbuf.tess_w_h >> 16;
  1615. get_format_bytes(target->format, &mul, &div, &align);
  1616. dst_align_width = target->stride * div / mul;
  1617. if (width == 0 || height == 0)
  1618. return VG_LITE_NO_CONTEXT;
  1619. if ((dst_align_width <= width) && (target->height <= height))
  1620. {
  1621. ts_is_fullscreen = 1;
  1622. point_min.x = 0;
  1623. point_min.y = 0;
  1624. point_max.x = dst_align_width;
  1625. point_max.y = target->height;
  1626. }
  1627. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  1628. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  1629. conversion = 0x80000000;
  1630. }
  1631. /* Determine image mode (NORMAL or MULTIPLY) depending on the color. */
  1632. image_mode = (source->image_mode == VG_LITE_NONE_IMAGE_MODE) ? 0 : (source->image_mode == VG_LITE_MULTIPLY_IMAGE_MODE) ? 0x00002000 : 0x00001000;
  1633. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  1634. switch (grad->spread_mode) {
  1635. case VG_LITE_GRADIENT_SPREAD_FILL:
  1636. linear_tile = 0x0;
  1637. break;
  1638. case VG_LITE_GRADIENT_SPREAD_PAD:
  1639. linear_tile = 0x1000;
  1640. break;
  1641. case VG_LITE_GRADIENT_SPREAD_REPEAT:
  1642. linear_tile = 0x2000;
  1643. break;
  1644. case VG_LITE_GRADIENT_SPREAD_REFLECT:
  1645. linear_tile = 0x3000;
  1646. break;
  1647. }
  1648. switch (filter) {
  1649. case VG_LITE_FILTER_POINT:
  1650. filter_mode = 0;
  1651. break;
  1652. case VG_LITE_FILTER_LINEAR:
  1653. filter_mode = 0x10000;
  1654. break;
  1655. case VG_LITE_FILTER_BI_LINEAR:
  1656. filter_mode = 0x20000;
  1657. break;
  1658. case VG_LITE_FILTER_GAUSSIAN:
  1659. filter_mode = 0x30000;
  1660. break;
  1661. }
  1662. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_FILL)
  1663. {
  1664. uint8_t a,r,g,b;
  1665. a = paint_color >> 24;
  1666. r = paint_color >> 16;
  1667. g = paint_color >> 8;
  1668. b = paint_color;
  1669. paint_color = (a << 24) | (b << 16) | (g << 8) | r;
  1670. }
1671. /* Compute linear gradient parameters. */
  1672. /* Compute inverse matrix. */
  1673. if (!inverse(&inverse_matrix, matrix))
  1674. return VG_LITE_INVALID_ARGUMENT;
  1675. dx = grad->linear_grad.X1 - grad->linear_grad.X0;
  1676. dy = grad->linear_grad.Y1 - grad->linear_grad.Y0;
  1677. dxdx_dydy = dx * dx + dy * dy;
  1678. /*
  1679. ** dx (T(x) - x0) + dy (T(y) - y0)
  1680. ** g = -------------------------------
  1681. ** dx^2 + dy^2
  1682. **
  1683. ** where
  1684. **
  1685. ** dx := x1 - x0
1686. ** dy := y1 - y0
  1687. ** T(x) := (x + 0.5) m00 + (y + 0.5) m01 + m02
  1688. ** = x m00 + y m01 + 0.5 (m00 + m01) + m02
  1689. ** T(y) := (x + 0.5) m10 + (y + 0.5) m11 + m12
  1690. ** = x m10 + y m11 + 0.5 (m10 + m11) + m12.
  1691. **
  1692. ** We can factor the top line into:
  1693. **
  1694. ** = dx (x m00 + y m01 + 0.5 (m00 + m01) + m02 - x0)
  1695. ** + dy (x m10 + y m11 + 0.5 (m10 + m11) + m12 - y0)
  1696. **
  1697. ** = x (dx m00 + dy m10)
  1698. ** + y (dx m01 + dy m11)
  1699. ** + dx (0.5 (m00 + m01) + m02 - x0)
  1700. ** + dy (0.5 (m10 + m11) + m12 - y0).
  1701. */
  1702. lg_step_x_lin
  1703. = (dx * MAT(&inverse_matrix, 0, 0) + dy * MAT(&inverse_matrix, 1, 0))
  1704. / dxdx_dydy;
  1705. lg_step_y_lin
  1706. = (dx * MAT(&inverse_matrix, 0, 1) + dy * MAT(&inverse_matrix, 1, 1))
  1707. / dxdx_dydy;
  1708. lg_constant_lin =
  1709. (
  1710. (
  1711. 0.5f * ( MAT(&inverse_matrix, 0, 0) + MAT(&inverse_matrix, 0, 1) )
  1712. + MAT(&inverse_matrix, 0, 2) - grad->linear_grad.X0
  1713. ) * dx
  1714. +
  1715. (
  1716. 0.5f * ( MAT(&inverse_matrix, 1, 0) + MAT(&inverse_matrix, 1, 1) )
  1717. + MAT(&inverse_matrix, 1, 2) - grad->linear_grad.Y0
  1718. ) * dy
  1719. )
  1720. / dxdx_dydy;
  1721. /* Setup the command buffer. */
  1722. /* linear gradient parameters*/
  1723. data = &lg_constant_lin;
  1724. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A04,*(uint32_t*) data));
  1725. data = &lg_step_x_lin;
  1726. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A06,*(uint32_t*) data));
  1727. data = &lg_step_y_lin;
  1728. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A08,*(uint32_t*) data));
  1729. VG_LITE_RETURN_ERROR(set_interpolation_steps(target, source->width, source->height, matrix));
  1730. /* enable pre-multiplied in image unit */
  1731. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) |
  1732. filter_mode | linear_tile | conversion | src_premultiply_enable));
  1733. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, paint_color));
  1734. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  1735. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, tiled_source));
  1736. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  1737. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width));
  1738. /* Work on path states. */
  1739. matrix = path_matrix;
  1740. if (ts_is_fullscreen == 0){
  1741. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  1742. point_min = point_max = temp;
  1743. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  1744. if (temp.x < point_min.x) point_min.x = temp.x;
  1745. if (temp.y < point_min.y) point_min.y = temp.y;
  1746. if (temp.x > point_max.x) point_max.x = temp.x;
  1747. if (temp.y > point_max.y) point_max.y = temp.y;
  1748. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  1749. if (temp.x < point_min.x) point_min.x = temp.x;
  1750. if (temp.y < point_min.y) point_min.y = temp.y;
  1751. if (temp.x > point_max.x) point_max.x = temp.x;
  1752. if (temp.y > point_max.y) point_max.y = temp.y;
  1753. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  1754. if (temp.x < point_min.x) point_min.x = temp.x;
  1755. if (temp.y < point_min.y) point_min.y = temp.y;
  1756. if (temp.x > point_max.x) point_max.x = temp.x;
  1757. if (temp.y > point_max.y) point_max.y = temp.y;
  1758. point_min.x = MAX(point_min.x, 0);
  1759. point_min.y = MAX(point_min.y, 0);
  1760. point_max.x = MIN(point_max.x, dst_align_width);
  1761. point_max.y = MIN(point_max.y, target->height);
  1762. if (s_context.scissor_set) {
  1763. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  1764. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  1765. point_max.x = MIN(point_max.x, s_context.scissor[0] + s_context.scissor[2]);
  1766. point_max.y = MIN(point_max.y, s_context.scissor[1] + s_context.scissor[3]);
  1767. }
  1768. }
  1769. /* Convert states into hardware values. */
  1770. blend_mode = convert_blend(blend);
  1771. format = convert_path_format(path->format);
  1772. quality = convert_path_quality(path->quality);
  1773. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  1774. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  1775. tessellation_size = s_context.tessbuf.L2_size ? s_context.tessbuf.L2_size : s_context.tessbuf.L1_size;
  1776. /* Setup the command buffer. */
  1777. /* Program color register. */
  1778. /* enable pre-multiplied from VG to VGPE */
  1779. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x01000002 | s_context.capabilities.cap.tiled | in_premult | image_mode | blend_mode | transparency_mode | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  1780. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000400 | format | quality | tiling | fill));
  1781. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  1782. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  1783. /* Program matrix. */
  1784. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &matrix->m[0][0]));
  1785. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &matrix->m[0][1]));
  1786. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &matrix->m[0][2]));
  1787. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &matrix->m[1][0]));
  1788. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &matrix->m[1][1]));
  1789. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &matrix->m[1][2]));
  1790. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  1791. {
  1792. if (path->path_changed != 0) {
  1793. if (path->uploaded.handle != NULL) {
  1794. free_memory.memory_handle = path->uploaded.handle;
  1795. vg_lite_kernel(VG_LITE_FREE, &free_memory);
  1796. path->uploaded.address = 0;
  1797. path->uploaded.memory = NULL;
  1798. path->uploaded.handle = NULL;
  1799. }
  1800. /* Allocate memory for the path data. */
  1801. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  1802. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  1803. memory.contiguous = 1;
  1804. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  1805. ((uint64_t *) memory.memory)[(path->path_length + 7) / 8] = 0;
  1806. ((uint32_t *) memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  1807. ((uint32_t *) memory.memory)[1] = 0;
  1808. memcpy((uint8_t *) memory.memory + 8, path->path, path->path_length);
  1809. ((uint32_t *) memory.memory)[return_offset] = VG_LITE_RETURN();
  1810. ((uint32_t *) memory.memory)[return_offset + 1] = 0;
  1811. path->uploaded.handle = memory.memory_handle;
  1812. path->uploaded.memory = memory.memory;
  1813. path->uploaded.address = memory.memory_gpu;
  1814. path->uploaded.bytes = memory.bytes;
  1815. path->path_changed = 0;
  1816. }
  1817. }
  1818. /* Setup tessellation loop. */
  1819. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO) {
  1820. for (y = point_min.y; y < point_max.y; y += height) {
  1821. for (x = point_min.x; x < point_max.x; x += width) {
  1822. /* Tessellate path. */
  1823. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1824. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1825. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1826. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1827. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1828. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  1829. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  1830. }
  1831. else {
  1832. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  1833. }
  1834. }
  1835. }
  1836. }
  1837. /* Setup tessellation loop. */
  1838. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  1839. for (y = point_min.y; y < point_max.y; y += height) {
  1840. for (x = point_min.x; x < point_max.x; x += width) {
  1841. /* Tessellate path. */
  1842. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  1843. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  1844. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  1845. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  1846. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  1847. format = convert_path_format(VG_LITE_FP32);
  1848. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  1849. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  1850. if (VLM_PATH_STROKE_GET_UPLOAD_BIT(*path) == 1) {
  1851. VG_LITE_RETURN_ERROR(push_call(&s_context, path->stroke->uploaded.address, path->stroke->uploaded.bytes));
  1852. } else {
  1853. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  1854. }
  1855. }
  1856. }
  1857. }
  1858. /* Finialize command buffer. */
  1859. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  1860. return error;
  1861. }
  1862. /* GC355/GC255 vg_lite_draw_radial_grad API implementation
  1863. */
  1864. vg_lite_error_t vg_lite_draw_radial_grad(vg_lite_buffer_t * target,
  1865. vg_lite_path_t * path,
  1866. vg_lite_fill_t fill_rule,
  1867. vg_lite_matrix_t * path_matrix,
  1868. vg_lite_radial_gradient_t *grad,
  1869. vg_lite_color_t paint_color,
  1870. vg_lite_blend_t blend,
  1871. vg_lite_filter_t filter)
  1872. {
  1873. vg_lite_error_t error = VG_LITE_SUCCESS;
  1874. uint32_t imageMode;
  1875. uint32_t blend_mode;
  1876. uint32_t filter_mode = 0;
  1877. uint32_t conversion = 0;
  1878. uint32_t tiled_source;
  1879. int32_t dst_align_width;
  1880. uint32_t mul, div, align;
  1881. vg_lite_matrix_t inverse_matrix;
  1882. vg_lite_buffer_t * source = &grad->image;
  1883. vg_lite_matrix_t * matrix = &grad->matrix;
  1884. uint32_t rad_tile = 0;
  1885. uint32_t transparency_mode = 0;
  1886. uint32_t in_premult = 0;
  1887. uint32_t src_premultiply_enable = 0;
  1888. uint32_t premul_flag = 0;
  1889. uint32_t prediv_flag = 0;
  1890. void *data;
  1891. /* The following code is from "draw path" */
  1892. uint32_t format, quality, tiling, fill;
  1893. uint32_t tessellation_size;
  1894. vg_lite_kernel_allocate_t memory;
  1895. vg_lite_kernel_free_t free_memory;
  1896. uint32_t return_offset = 0;
  1897. vg_lite_point_t point_min = {0}, point_max = {0}, temp = {0};
  1898. int x, y, width, height;
  1899. uint8_t ts_is_fullscreen = 0;
  1900. vg_lite_float_t radius;
  1901. vg_lite_float_t centerX, centerY;
  1902. vg_lite_float_t focalX, focalY;
  1903. vg_lite_float_t fx, fy;
  1904. vg_lite_float_t fxfy_2;
  1905. vg_lite_float_t radius2;
  1906. vg_lite_float_t r2_fx2, r2_fy2;
  1907. vg_lite_float_t r2_fx2_2, r2_fy2_2;
  1908. vg_lite_float_t r2_fx2_fy2;
  1909. vg_lite_float_t r2_fx2_fy2sq;
  1910. vg_lite_float_t cx, cy;
  1911. vg_lite_float_t rgConstantLin, rgStepXLin, rgStepYLin;
  1912. vg_lite_float_t rgConstantRad, rgStepXRad, rgStepYRad;
  1913. vg_lite_float_t rgStepXXRad, rgStepYYRad, rgStepXYRad;
  1914. #if(CHIPID == 0X355)
  1915. uint8_t* path_re = NULL;
  1916. uint32_t index = 0;
  1917. #endif
  1918. #if gcFEATURE_VG_TRACE_API
  1919. VGLITE_LOG("vg_lite_draw_radial_grad %p %p %d %p %p 0x%08X %d %d\n",
  1920. target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  1921. #endif
  1922. #if gcFEATURE_VG_ERROR_CHECK
  1923. #if !gcFEATURE_VG_LVGL_SUPPORT
  1924. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  1925. return VG_LITE_NOT_SUPPORT;
  1926. }
  1927. #endif
  1928. #if !gcFEATURE_VG_QUALITY_8X
  1929. if (path->quality == VG_LITE_UPPER) {
  1930. return VG_LITE_NOT_SUPPORT;
  1931. }
  1932. #endif
  1933. if (source->format == VG_LITE_A4 || source->format == VG_LITE_A8) {
  1934. return VG_LITE_NOT_SUPPORT;
  1935. }
  1936. if (!path || !path->path) {
  1937. return VG_LITE_INVALID_ARGUMENT;
  1938. }
  1939. #if (CHIPID == 0x355)
  1940. if (target->format == VG_LITE_L8 || target->format == VG_LITE_YUYV ||
  1941. target->format == VG_LITE_BGRA2222 || target->format == VG_LITE_RGBA2222 ||
  1942. target->format == VG_LITE_ABGR2222 || target->format == VG_LITE_ARGB2222) {
  1943. printf("Target format: 0x%x is not supported.\n", target->format);
  1944. return VG_LITE_NOT_SUPPORT;
  1945. }
  1946. #endif
  1947. radius = grad->radial_grad.r;
  1948. if (radius < 0) {
  1949. return VG_LITE_INVALID_ARGUMENT;
  1950. }
  1951. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  1952. #endif /* gcFEATURE_VG_ERROR_CHECK */
  1953. if (!path->path_length) {
  1954. return VG_LITE_SUCCESS;
  1955. }
  1956. if (!path_matrix) {
  1957. path_matrix = &identity_mtx;
  1958. }
  1959. #if gcFEATURE_VG_GAMMA
  1960. set_gamma_dest_only(target, VGL_TRUE);
  1961. #endif
  1962. /*blend input into context*/
  1963. s_context.blend_mode = blend;
  1964. src_premultiply_enable = 0x01000100;
  1965. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  1966. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  1967. prediv_flag = 0;
  1968. }
  1969. else {
  1970. prediv_flag = 1;
  1971. }
  1972. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE) {
  1973. premul_flag = 1;
  1974. }
  1975. else {
  1976. premul_flag = 0;
  1977. }
  1978. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  1979. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  1980. src_premultiply_enable = 0x01000100;
  1981. in_premult = 0x10000000;
  1982. }
  1983. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  1984. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  1985. src_premultiply_enable = 0x00000100;
  1986. in_premult = 0x00000000;
  1987. }
  1988. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  1989. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  1990. src_premultiply_enable = 0x01000100;
  1991. in_premult = 0x00000000;
  1992. }
  1993. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  1994. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  1995. src_premultiply_enable = 0x00000100;
  1996. in_premult = 0x00000000;
  1997. }
  1998. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  1999. #if (CHIPID==0x255)
  2000. src_premultiply_enable = 0x00000000;
  2001. #endif
  2002. in_premult = 0x00000000;
  2003. }
  2004. if (blend == VG_LITE_BLEND_NORMAL_LVGL) {
  2005. in_premult = 0x00000000;
  2006. }
  2007. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  2008. target->apply_premult = 1;
  2009. }
  2010. else {
  2011. target->apply_premult = 0;
  2012. }
  2013. error = set_render_target(target);
  2014. if (error != VG_LITE_SUCCESS) {
  2015. return error;
  2016. } else if (error == VG_LITE_NO_CONTEXT) {
  2017. /* If scissoring is enabled and no valid scissoring rectangles
  2018. are present, no drawing occurs */
  2019. return VG_LITE_SUCCESS;
  2020. }
  2021. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  2022. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  2023. height = s_context.tessbuf.tess_w_h >> 16;
  2024. get_format_bytes(target->format, &mul, &div, &align);
  2025. dst_align_width = target->stride * div / mul;
  2026. if (width == 0 || height == 0)
  2027. return VG_LITE_NO_CONTEXT;
  2028. if ((dst_align_width <= width) && (target->height <= height))
  2029. {
  2030. ts_is_fullscreen = 1;
  2031. point_min.x = 0;
  2032. point_min.y = 0;
  2033. point_max.x = dst_align_width;
  2034. point_max.y = target->height;
  2035. }
  2036. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  2037. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  2038. conversion = 0x80000000;
  2039. }
  2040. /* Determine image mode (NORMAL or MULTIPLY) depending on the color. */
  2041. imageMode = (source->image_mode == VG_LITE_NONE_IMAGE_MODE) ? 0 : (source->image_mode == VG_LITE_MULTIPLY_IMAGE_MODE) ? 0x00002000 : 0x00001000;
  2042. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  2043. switch (grad->spread_mode) {
  2044. case VG_LITE_GRADIENT_SPREAD_FILL:
  2045. rad_tile = 0x0;
  2046. break;
  2047. case VG_LITE_GRADIENT_SPREAD_PAD:
  2048. rad_tile = 0x1000;
  2049. break;
  2050. case VG_LITE_GRADIENT_SPREAD_REPEAT:
  2051. rad_tile = 0x2000;
  2052. break;
  2053. case VG_LITE_GRADIENT_SPREAD_REFLECT:
  2054. rad_tile = 0x3000;
  2055. break;
  2056. }
  2057. switch (filter) {
  2058. case VG_LITE_FILTER_POINT:
  2059. filter_mode = 0;
  2060. break;
  2061. case VG_LITE_FILTER_LINEAR:
  2062. filter_mode = 0x10000;
  2063. break;
  2064. case VG_LITE_FILTER_BI_LINEAR:
  2065. filter_mode = 0x20000;
  2066. break;
  2067. case VG_LITE_FILTER_GAUSSIAN:
  2068. filter_mode = 0x30000;
  2069. break;
  2070. }
  2071. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_FILL)
  2072. {
  2073. uint8_t a,r,g,b;
  2074. a = paint_color >> 24;
  2075. r = paint_color >> 16;
  2076. g = paint_color >> 8;
  2077. b = paint_color;
  2078. paint_color = (a << 24) | (b << 16) | (g << 8) | r;
  2079. }
  2080. /* compute radial gradient paremeters */
  2081. /* Compute inverse matrix. */
  2082. if (!inverse(&inverse_matrix, matrix))
  2083. return VG_LITE_INVALID_ARGUMENT;
  2084. /* Make shortcuts to the gradient information. */
  2085. centerX = grad->radial_grad.cx;
  2086. centerY = grad->radial_grad.cy;
  2087. focalX = grad->radial_grad.fx;
  2088. focalY = grad->radial_grad.fy;
  2089. /* Compute constants of the equation. */
  2090. fx = focalX - centerX;
  2091. fy = focalY - centerY;
  2092. radius2 = radius * radius;
  2093. if (fx*fx + fy*fy > radius2)
  2094. {
  2095. /* If the focal point is outside the circle, let's move it
  2096. to inside the circle. Per vg11 spec pg125 "If (fx, fy) lies outside ...
  2097. For here, we set it at 0.9 ratio to the center.
  2098. */
  2099. vg_lite_float_t fr = (vg_lite_float_t)sqrt(fx*fx + fy*fy);
  2100. fx = radius * fx / fr * 0.9f;
  2101. fy = radius * fy / fr * 0.9f;
  2102. focalX = grad->radial_grad.fx + fx;
  2103. focalY = grad->radial_grad.fy + fy;
  2104. }
  2105. fxfy_2 = 2.0f * fx * fy;
  2106. r2_fx2 = radius2 - fx * fx;
  2107. r2_fy2 = radius2 - fy * fy;
  2108. r2_fx2_2 = 2.0f * r2_fx2;
  2109. r2_fy2_2 = 2.0f * r2_fy2;
  2110. r2_fx2_fy2 = r2_fx2 - fy * fy;
  2111. r2_fx2_fy2sq = r2_fx2_fy2 * r2_fx2_fy2;
  2112. /* _____________________________________
  2113. ** dx fx + dy fy + \/r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  2114. ** g = -------------------------------------------------------
  2115. ** r^2 - fx^2 - fy^2
  2116. **
  2117. ** Where
  2118. **
  2119. ** dx := F(x) - focalX
  2120. ** dy := F(y) - focalY
  2121. ** fx := focalX - centerX
  2122. ** fy := focalX - centerY
  2123. **
  2124. ** and
  2125. **
  2126. ** F(x) := (x + 0.5) m00 + (y + 0.5) m01 + m02
  2127. ** F(y) := (x + 0.5) m10 + (y + 0.5) m11 + m12
  2128. **
  2129. ** So, dx can be factored into
  2130. **
  2131. ** dx = (x + 0.5) m00 + (y + 0.5) m01 + m02 - focalX
  2132. ** = x m00 + y m01 + 0.5 m00 + 0.5 m01 + m02 - focalX
  2133. **
  2134. ** = x m00 + y m01 + cx
  2135. **
  2136. ** where
  2137. **
  2138. ** cx := 0.5 m00 + 0.5 m01 + m02 - focalX
  2139. **
  2140. ** The same way we can factor dy into
  2141. **
  2142. ** dy = x m10 + y m11 + cy
  2143. **
  2144. ** where
  2145. **
  2146. ** cy := 0.5 m10 + 0.5 m11 + m12 - focalY.
  2147. **
  2148. ** Now we can rewrite g as
  2149. ** ______________________________________
  2150. ** dx fx + dy fy / r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  2151. ** g = ----------------- + \ / -------------------------------------
  2152. ** r^2 - fx^2 - fy^2 \/ (r^2 - fx^2 - fy^2)^2
  2153. ** ____
  2154. ** = gLin + \/gRad
  2155. **
  2156. ** where
  2157. **
  2158. ** dx fx + dy fy
  2159. ** gLin := -----------------
  2160. ** r^2 - fx^2 - fy^2
  2161. **
  2162. ** r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  2163. ** gRad := -------------------------------------
  2164. ** (r^2 - fx^2 - fy^2)^2
  2165. */
  2166. cx
  2167. = 0.5f * ( MAT(&inverse_matrix, 0, 0) + MAT(&inverse_matrix, 0, 1) )
  2168. + MAT(&inverse_matrix, 0, 2)
  2169. - focalX;
  2170. cy
  2171. = 0.5f * ( MAT(&inverse_matrix, 1, 0) + MAT(&inverse_matrix, 1, 1) )
  2172. + MAT(&inverse_matrix, 1, 2)
  2173. - focalY;
  2174. /*
  2175. ** dx fx + dy fy
  2176. ** gLin := -----------------
  2177. ** r^2 - fx^2 - fy^2
  2178. **
  2179. ** We can factor the top half into
  2180. **
  2181. ** = (x m00 + y m01 + cx) fx + (x m10 + y m11 + cy) fy
  2182. **
  2183. ** = x (m00 fx + m10 fy)
  2184. ** + y (m01 fx + m11 fy)
  2185. ** + cx fx + cy fy.
  2186. */
  2187. rgStepXLin
  2188. = ( MAT(&inverse_matrix, 0, 0) * fx + MAT(&inverse_matrix, 1, 0) * fy )
  2189. / r2_fx2_fy2;
  2190. rgStepYLin
  2191. = ( MAT(&inverse_matrix, 0, 1) * fx + MAT(&inverse_matrix, 1, 1) * fy )
  2192. / r2_fx2_fy2;
  2193. rgConstantLin = ( cx * fx + cy * fy ) / r2_fx2_fy2;
  2194. /*
  2195. ** r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  2196. ** gRad := -------------------------------------
  2197. ** (r^2 - fx^2 - fy^2)^2
  2198. **
  2199. ** r^2 (dx^2 + dy^2) - dx^2 fy^2 - dy^2 fx^2 + 2 dx dy fx fy
  2200. ** := ---------------------------------------------------------
  2201. ** (r^2 - fx^2 - fy^2)^2
  2202. **
  2203. ** dx^2 (r^2 - fy^2) + dy^2 (r^2 - fx^2) + 2 dx dy fx fy
  2204. ** := -----------------------------------------------------
  2205. ** (r^2 - fx^2 - fy^2)^2
  2206. **
  2207. ** First, lets factor dx^2 into
  2208. **
  2209. ** dx^2 = (x m00 + y m01 + cx)^2
  2210. ** = x^2 m00^2 + y^2 m01^2 + 2 x y m00 m01
  2211. ** + 2 x m00 cx + 2 y m01 cx + cx^2
  2212. **
  2213. ** = x^2 (m00^2)
  2214. ** + y^2 (m01^2)
  2215. ** + x y (2 m00 m01)
  2216. ** + x (2 m00 cx)
  2217. ** + y (2 m01 cx)
  2218. ** + cx^2.
  2219. **
  2220. ** The same can be done for dy^2:
  2221. **
  2222. ** dy^2 = x^2 (m10^2)
  2223. ** + y^2 (m11^2)
  2224. ** + x y (2 m10 m11)
  2225. ** + x (2 m10 cy)
  2226. ** + y (2 m11 cy)
  2227. ** + cy^2.
  2228. **
  2229. ** Let's also factor dx dy into
  2230. **
  2231. ** dx dy = (x m00 + y m01 + cx) (x m10 + y m11 + cy)
  2232. ** = x^2 m00 m10 + y^2 m01 m11 + x y m00 m11 + x y m01 m10
  2233. ** + x m00 cy + x m10 cx + y m01 cy + y m11 cx + cx cy
  2234. **
  2235. ** = x^2 (m00 m10)
  2236. ** + y^2 (m01 m11)
  2237. ** + x y (m00 m11 + m01 m10)
  2238. ** + x (m00 cy + m10 cx)
  2239. ** + y (m01 cy + m11 cx)
  2240. ** + cx cy.
  2241. **
  2242. ** Now that we have all this, lets look at the top of gRad.
  2243. **
  2244. ** = dx^2 (r^2 - fy^2) + dy^2 (r^2 - fx^2) + 2 dx dy fx fy
  2245. ** = x^2 m00^2 (r^2 - fy^2) + y^2 m01^2 (r^2 - fy^2)
  2246. ** + x y 2 m00 m01 (r^2 - fy^2) + x 2 m00 cx (r^2 - fy^2)
  2247. ** + y 2 m01 cx (r^2 - fy^2) + cx^2 (r^2 - fy^2)
  2248. ** + x^2 m10^2 (r^2 - fx^2) + y^2 m11^2 (r^2 - fx^2)
  2249. ** + x y 2 m10 m11 (r^2 - fx^2) + x 2 m10 cy (r^2 - fx^2)
  2250. ** + y 2 m11 cy (r^2 - fx^2) + cy^2 (r^2 - fx^2)
  2251. ** + x^2 m00 m10 2 fx fy + y^2 m01 m11 2 fx fy
  2252. ** + x y (m00 m11 + m01 m10) 2 fx fy
  2253. ** + x (m00 cy + m10 cx) 2 fx fy + y (m01 cy + m11 cx) 2 fx fy
  2254. ** + cx cy 2 fx fy
  2255. **
  2256. ** = x^2 ( m00^2 (r^2 - fy^2)
  2257. ** + m10^2 (r^2 - fx^2)
  2258. ** + m00 m10 2 fx fy
  2259. ** )
  2260. ** + y^2 ( m01^2 (r^2 - fy^2)
  2261. ** + m11^2 (r^2 - fx^2)
  2262. ** + m01 m11 2 fx fy
  2263. ** )
  2264. ** + x y ( 2 m00 m01 (r^2 - fy^2)
  2265. ** + 2 m10 m11 (r^2 - fx^2)
  2266. ** + (m00 m11 + m01 m10) 2 fx fy
  2267. ** )
  2268. ** + x ( 2 m00 cx (r^2 - fy^2)
  2269. ** + 2 m10 cy (r^2 - fx^2)
  2270. ** + (m00 cy + m10 cx) 2 fx fy
  2271. ** )
  2272. ** + y ( 2 m01 cx (r^2 - fy^2)
  2273. ** + 2 m11 cy (r^2 - fx^2)
  2274. ** + (m01 cy + m11 cx) 2 fx fy
  2275. ** )
  2276. ** + cx^2 (r^2 - fy^2) + cy^2 (r^2 - fx^2) + cx cy 2 fx fy.
  2277. */
  2278. rgStepXXRad =
  2279. (
  2280. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 0, 0) * r2_fy2
  2281. + MAT(&inverse_matrix, 1, 0) * MAT(&inverse_matrix, 1, 0) * r2_fx2
  2282. + MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 1, 0) * fxfy_2
  2283. )
  2284. / r2_fx2_fy2sq;
  2285. rgStepYYRad =
  2286. (
  2287. MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 0, 1) * r2_fy2
  2288. + MAT(&inverse_matrix, 1, 1) * MAT(&inverse_matrix, 1, 1) * r2_fx2
  2289. + MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 1, 1) * fxfy_2
  2290. )
  2291. / r2_fx2_fy2sq;
  2292. rgStepXYRad =
  2293. (
  2294. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 0, 1) * r2_fy2_2
  2295. + MAT(&inverse_matrix, 1, 0) * MAT(&inverse_matrix, 1, 1) * r2_fx2_2
  2296. + (
  2297. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 1, 1)
  2298. + MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 1, 0)
  2299. )
  2300. * fxfy_2
  2301. )
  2302. / r2_fx2_fy2sq;
  2303. rgStepXRad =
  2304. (
  2305. MAT(&inverse_matrix, 0, 0) * cx * r2_fy2_2
  2306. + MAT(&inverse_matrix, 1, 0) * cy * r2_fx2_2
  2307. + (
  2308. MAT(&inverse_matrix, 0, 0) * cy
  2309. + MAT(&inverse_matrix, 1, 0) * cx
  2310. )
  2311. * fxfy_2
  2312. )
  2313. / r2_fx2_fy2sq;
  2314. rgStepYRad =
  2315. (
  2316. MAT(&inverse_matrix, 0, 1) * cx * r2_fy2_2
  2317. + MAT(&inverse_matrix, 1, 1) * cy * r2_fx2_2
  2318. + (
  2319. MAT(&inverse_matrix, 0, 1) * cy
  2320. + MAT(&inverse_matrix, 1, 1) * cx
  2321. )
  2322. * fxfy_2
  2323. )
  2324. / r2_fx2_fy2sq;
  2325. rgConstantRad =
  2326. (
  2327. cx * cx * r2_fy2
  2328. + cy * cy * r2_fx2
  2329. + cx * cy * fxfy_2
  2330. )
  2331. / r2_fx2_fy2sq;
  2332. /* Setup the command buffer. */
  2333. data = &rgConstantLin;
  2334. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A04,*(uint32_t*) data));
  2335. data = &rgStepXLin;
  2336. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A06,*(uint32_t*) data));
  2337. data = &rgStepYLin;
  2338. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A08,*(uint32_t*) data));
  2339. data = &rgConstantRad;
  2340. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A05,*(uint32_t*) data));
  2341. data = &rgStepXRad;
  2342. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A07,*(uint32_t*) data));
  2343. data = &rgStepYRad;
  2344. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A09,*(uint32_t*) data));
  2345. data = &rgStepXXRad;
  2346. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A03,*(uint32_t*) data));
  2347. data = &rgStepYYRad;
  2348. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A0A,*(uint32_t*) data));
  2349. data = &rgStepXYRad;
  2350. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A0B,*(uint32_t*) data));
  2351. VG_LITE_RETURN_ERROR(set_interpolation_steps(target, source->width, source->height, matrix));
  2352. /* enable pre-multiplied in image unit */
  2353. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) |
  2354. filter_mode | rad_tile | conversion | src_premultiply_enable));
  2355. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, paint_color));
  2356. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  2357. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, tiled_source));
  2358. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  2359. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width));
  2360. /* Work on path states. */
  2361. matrix = path_matrix;
  2362. if (ts_is_fullscreen == 0){
  2363. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  2364. point_min = point_max = temp;
  2365. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  2366. if (temp.x < point_min.x) point_min.x = temp.x;
  2367. if (temp.y < point_min.y) point_min.y = temp.y;
  2368. if (temp.x > point_max.x) point_max.x = temp.x;
  2369. if (temp.y > point_max.y) point_max.y = temp.y;
  2370. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  2371. if (temp.x < point_min.x) point_min.x = temp.x;
  2372. if (temp.y < point_min.y) point_min.y = temp.y;
  2373. if (temp.x > point_max.x) point_max.x = temp.x;
  2374. if (temp.y > point_max.y) point_max.y = temp.y;
  2375. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  2376. if (temp.x < point_min.x) point_min.x = temp.x;
  2377. if (temp.y < point_min.y) point_min.y = temp.y;
  2378. if (temp.x > point_max.x) point_max.x = temp.x;
  2379. if (temp.y > point_max.y) point_max.y = temp.y;
  2380. point_min.x = MAX(point_min.x, 0);
  2381. point_min.y = MAX(point_min.y, 0);
  2382. point_max.x = MIN(point_max.x, dst_align_width);
  2383. point_max.y = MIN(point_max.y, target->height);
  2384. if (s_context.scissor_set) {
  2385. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  2386. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  2387. point_max.x = MIN(point_max.x, s_context.scissor[0] + s_context.scissor[2]);
  2388. point_max.y = MIN(point_max.y, s_context.scissor[1] + s_context.scissor[3]);
  2389. }
  2390. }
  2391. /* Convert states into hardware values. */
  2392. blend_mode = convert_blend(blend);
  2393. format = convert_path_format(path->format);
  2394. quality = convert_path_quality(path->quality);
  2395. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  2396. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  2397. tessellation_size = s_context.tessbuf.L2_size ? s_context.tessbuf.L2_size : s_context.tessbuf.L1_size;
  2398. /* Setup the command buffer. */
  2399. /* Program color register. */
  2400. /* enable pre-multiplied from VG to VGPE */
  2401. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x02000002 | s_context.capabilities.cap.tiled | in_premult | imageMode | blend_mode | transparency_mode | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  2402. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000400 | format | quality | tiling | fill));
  2403. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  2404. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  2405. /* Program matrix. */
  2406. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &matrix->m[0][0]));
  2407. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &matrix->m[0][1]));
  2408. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &matrix->m[0][2]));
  2409. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &matrix->m[1][0]));
  2410. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &matrix->m[1][1]));
  2411. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &matrix->m[1][2]));
  2412. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  2413. {
  2414. if (path->path_changed != 0) {
  2415. if (path->uploaded.handle != NULL) {
  2416. free_memory.memory_handle = path->uploaded.handle;
  2417. vg_lite_kernel(VG_LITE_FREE, &free_memory);
  2418. path->uploaded.address = 0;
  2419. path->uploaded.memory = NULL;
  2420. path->uploaded.handle = NULL;
  2421. }
  2422. /* Allocate memory for the path data. */
  2423. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  2424. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  2425. memory.contiguous = 1;
  2426. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  2427. ((uint64_t *) memory.memory)[(path->path_length + 7) / 8] = 0;
  2428. ((uint32_t *) memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  2429. ((uint32_t *) memory.memory)[1] = 0;
  2430. memcpy((uint8_t *) memory.memory + 8, path->path, path->path_length);
  2431. ((uint32_t *) memory.memory)[return_offset] = VG_LITE_RETURN();
  2432. ((uint32_t *) memory.memory)[return_offset + 1] = 0;
  2433. path->uploaded.handle = memory.memory_handle;
  2434. path->uploaded.memory = memory.memory;
  2435. path->uploaded.address = memory.memory_gpu;
  2436. path->uploaded.bytes = memory.bytes;
  2437. path->path_changed = 0;
  2438. }
  2439. }
  2440. /* Setup tessellation loop. */
  2441. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2442. for (y = point_min.y; y < point_max.y; y += height) {
  2443. for (x = point_min.x; x < point_max.x; x += width) {
  2444. /* Tessellate path. */
  2445. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  2446. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2447. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  2448. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  2449. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2450. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2451. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  2452. }
  2453. else {
  2454. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  2455. }
  2456. }
  2457. }
  2458. }
  2459. /* Setup tessellation loop. */
  2460. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2461. for (y = point_min.y; y < point_max.y; y += height) {
  2462. for (x = point_min.x; x < point_max.x; x += width) {
  2463. /* Tessellate path. */
  2464. VG_LITE_RETURN_ERROR(push_stall(&s_context, 15));
  2465. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2466. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A01, x | (y << 16)));
  2467. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, x | (y << 16)));
  2468. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2469. format = convert_path_format(VG_LITE_FP32);
  2470. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  2471. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  2472. if (VLM_PATH_STROKE_GET_UPLOAD_BIT(*path) == 1) {
  2473. VG_LITE_RETURN_ERROR(push_call(&s_context, path->stroke->uploaded.address, path->stroke->uploaded.bytes));
  2474. } else {
  2475. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  2476. }
  2477. }
  2478. }
  2479. }
  2480. /* Finialize command buffer. */
  2481. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  2482. return error;
  2483. }
  2484. #else /* (CHIPID==0x355 || CHIPID==0x255) */
  2485. /* GC555 vg_lite_draw API implementation
  2486. */
  2487. vg_lite_error_t vg_lite_draw(vg_lite_buffer_t* target,
  2488. vg_lite_path_t* path,
  2489. vg_lite_fill_t fill_rule,
  2490. vg_lite_matrix_t* matrix,
  2491. vg_lite_blend_t blend,
  2492. vg_lite_color_t color)
  2493. {
  2494. #if DUMP_API
  2495. FUNC_DUMP(vg_lite_draw)(target, path, fill_rule, matrix, blend, color);
  2496. #endif
  2497. uint32_t blend_mode;
  2498. uint32_t format, quality, tiling, fill;
  2499. uint32_t tessellation_size;
  2500. vg_lite_error_t error;
  2501. vg_lite_point_t point_min = { 0 }, point_max = { 0 }, temp = { 0 };
  2502. int width, height;
  2503. uint8_t ts_is_fullscreen = 0;
  2504. uint32_t return_offset = 0;
  2505. vg_lite_kernel_free_t free_memory;
  2506. vg_lite_kernel_allocate_t memory;
  2507. float new_matrix[6];
  2508. float scale, bias;
  2509. uint32_t tile_setting = 0;
  2510. uint32_t in_premult = 0;
  2511. uint32_t premul_flag = 0;
  2512. #if (!gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_512_PARALLEL_PATHS)
  2513. uint32_t parallel_workpaths1 = 2;
  2514. uint32_t parallel_workpaths2 = 2;
  2515. #endif
  2516. #if (!gcFEATURE_VG_SPLIT_PATH || !gcFEATURE_VG_PARALLEL_PATHS || !gcFEATURE_VG_512_PARALLEL_PATHS)
  2517. int32_t y = 0;
  2518. uint32_t par_height = 0;
  2519. int32_t next_boundary = 0;
  2520. #endif
  2521. #if gcFEATURE_VG_TRACE_API
  2522. VGLITE_LOG("vg_lite_draw %p %p %d %p %d 0x%08X\n", target, path, fill_rule, matrix, blend, color);
  2523. VGLITE_LOG(" path_type %d, path_length %d, stroke_size %d\n", path->path_type, path->path_length, path->stroke_size);
  2524. #endif
  2525. #if gcFEATURE_VG_ERROR_CHECK
  2526. #if !gcFEATURE_VG_QUALITY_8X
  2527. if (path->quality == VG_LITE_UPPER) {
  2528. return VG_LITE_NOT_SUPPORT;
  2529. }
  2530. #endif
  2531. #if !gcFEATURE_VG_24BIT
  2532. if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
  2533. return VG_LITE_NOT_SUPPORT;
  2534. }
  2535. #endif
  2536. #if !gcFEATURE_VG_NEW_BLEND_MODE
  2537. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  2538. return VG_LITE_NOT_SUPPORT;
  2539. }
  2540. #endif
  2541. if (!path || !path->path) {
  2542. return VG_LITE_INVALID_ARGUMENT;
  2543. }
  2544. #endif /* gcFEATURE_VG_ERROR_CHECK */
  2545. if (!path->path_length) {
  2546. return VG_LITE_SUCCESS;
  2547. }
  2548. if (!matrix) {
  2549. matrix = &identity_mtx;
  2550. }
  2551. #if gcFEATURE_VG_GAMMA
  2552. set_gamma_dest_only(target, VGL_FALSE);
  2553. #endif
  2554. #if gcFEATURE_VG_GLOBAL_ALPHA
  2555. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  2556. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  2557. }
  2558. #endif
  2559. /*blend input into context*/
  2560. s_context.blend_mode = blend;
  2561. /* Adjust premultiply setting according to openvg condition */
  2562. target->apply_premult = 0;
  2563. premul_flag = (s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE)
  2564. ||(s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL);
  2565. if (target->premultiplied == 0 && premul_flag == 0) {
  2566. in_premult = 0x10000000;
  2567. target->apply_premult = 1;
  2568. }
  2569. else if ((target->premultiplied == 1) ||
  2570. (target->premultiplied == 0 && premul_flag == 1)) {
  2571. in_premult = 0x00000000;
  2572. }
  2573. error = set_render_target(target);
  2574. if (error != VG_LITE_SUCCESS) {
  2575. return error;
  2576. }
  2577. if ((target->format == VG_LITE_YUYV || target->format == VG_LITE_YUY2 || target->format == VG_LITE_YUY2_TILED
  2578. || target->format == VG_LITE_AYUY2 || target->format == VG_LITE_AYUY2_TILED)
  2579. && path->quality != VG_LITE_LOW)
  2580. {
  2581. path->quality = VG_LITE_LOW;
  2582. printf("If target is YUV group , the path qulity should use VG_LITE_LOW.\n");
  2583. }
  2584. width = target->width;
  2585. height = target->height;
  2586. if (s_context.scissor_set) {
  2587. width = s_context.scissor[2] - s_context.scissor[0];
  2588. height = s_context.scissor[3] - s_context.scissor[1];
  2589. }
  2590. if (width == 0 || height == 0)
  2591. return VG_LITE_NO_CONTEXT;
  2592. if ((target->width <= width) && (target->height <= height) && (!s_context.scissor_set))
  2593. {
  2594. ts_is_fullscreen = 1;
  2595. point_min.x = 0;
  2596. point_min.y = 0;
  2597. point_max.x = target->width;
  2598. point_max.y = target->height;
  2599. }
  2600. if (ts_is_fullscreen == 0) {
  2601. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  2602. point_min = point_max = temp;
  2603. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  2604. if (temp.x < point_min.x) point_min.x = temp.x;
  2605. if (temp.y < point_min.y) point_min.y = temp.y;
  2606. if (temp.x > point_max.x) point_max.x = temp.x;
  2607. if (temp.y > point_max.y) point_max.y = temp.y;
  2608. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  2609. if (temp.x < point_min.x) point_min.x = temp.x;
  2610. if (temp.y < point_min.y) point_min.y = temp.y;
  2611. if (temp.x > point_max.x) point_max.x = temp.x;
  2612. if (temp.y > point_max.y) point_max.y = temp.y;
  2613. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  2614. if (temp.x < point_min.x) point_min.x = temp.x;
  2615. if (temp.y < point_min.y) point_min.y = temp.y;
  2616. if (temp.x > point_max.x) point_max.x = temp.x;
  2617. if (temp.y > point_max.y) point_max.y = temp.y;
  2618. if (point_min.x < 0) point_min.x = 0;
  2619. if (point_min.y < 0) point_min.y = 0;
  2620. if (point_max.x > target->width) point_max.x = target->width;
  2621. if (point_max.y > target->height) point_max.y = target->height;
  2622. if (s_context.scissor_set) {
  2623. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  2624. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  2625. point_max.x = MIN(point_max.x, s_context.scissor[2]);
  2626. point_max.y = MIN(point_max.y, s_context.scissor[3]);
  2627. }
  2628. }
  2629. width = point_max.x - point_min.x;
  2630. height = point_max.y - point_min.y;
  2631. scale = 1.0f;
  2632. bias = 0.0f;
  2633. new_matrix[0] = matrix->m[0][0] * scale;
  2634. new_matrix[1] = matrix->m[0][1] * scale;
  2635. new_matrix[2] = (matrix->m[0][0] + matrix->m[0][1]) * bias + matrix->m[0][2];
  2636. new_matrix[3] = matrix->m[1][0] * scale;
  2637. new_matrix[4] = matrix->m[1][1] * scale;
  2638. new_matrix[5] = (matrix->m[1][0] + matrix->m[1][1]) * bias + matrix->m[1][2];
  2639. /* Convert states into hardware values. */
  2640. blend_mode = convert_blend(blend);
  2641. format = convert_path_format(path->format);
  2642. quality = convert_path_quality(path->quality);
  2643. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  2644. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  2645. tessellation_size = s_context.tessbuf.tessbuf_size;
  2646. #if gcFEATURE_VG_TESSELLATION_TILED_OUT
  2647. tile_setting = (target->tiled != VG_LITE_LINEAR) ? 0x40 : 0;
  2648. #endif
  2649. /* Setup the command buffer. */
  2650. /* Program color register. */
  2651. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | s_context.capabilities.cap.tiled | blend_mode | tile_setting | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  2652. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  2653. /* Program tessellation control: for TS module. */
  2654. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000000 | format | quality | tiling | fill));
  2655. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  2656. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  2657. /* Program matrix. */
  2658. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void*)&new_matrix[0]));
  2659. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void*)&new_matrix[1]));
  2660. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void*)&new_matrix[2]));
  2661. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void*)&new_matrix[3]));
  2662. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void*)&new_matrix[4]));
  2663. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void*)&new_matrix[5]));
  2664. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACD, (void*)&matrix->m[0][2]));
  2665. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACE, (void*)&matrix->m[1][2]));
  2666. /* DDRLess does not support uploading path data. */
  2667. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  2668. {
  2669. if (path->path_changed != 0) {
  2670. if (path->uploaded.handle != NULL) {
  2671. free_memory.memory_handle = path->uploaded.handle;
  2672. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_FREE, &free_memory));
  2673. path->uploaded.address = 0;
  2674. path->uploaded.memory = NULL;
  2675. path->uploaded.handle = NULL;
  2676. }
  2677. /* Allocate memory for the path data. */
  2678. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  2679. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  2680. memory.contiguous = 1;
  2681. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  2682. ((uint64_t*)memory.memory)[(path->path_length + 7) / 8] = 0;
  2683. ((uint32_t*)memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  2684. ((uint32_t*)memory.memory)[1] = 0;
  2685. memcpy((uint8_t*)memory.memory + 8, path->path, path->path_length);
  2686. ((uint32_t*)memory.memory)[return_offset] = VG_LITE_RETURN();
  2687. ((uint32_t*)memory.memory)[return_offset + 1] = 0;
  2688. path->uploaded.handle = memory.memory_handle;
  2689. path->uploaded.memory = memory.memory;
  2690. path->uploaded.address = memory.memory_gpu;
  2691. path->uploaded.bytes = memory.bytes;
  2692. path->path_changed = 0;
  2693. }
  2694. }
  2695. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2696. vglitemDUMP_BUFFER("path", (size_t)path->uploaded.address, (uint8_t*)(path->uploaded.memory), 0, path->uploaded.bytes);
  2697. }
  2698. #if !DUMP_COMMAND_CAPTURE
  2699. vglitemDUMP("@[memory 0x%08X 0x%08X]", s_context.tessbuf.physical_addr, s_context.tessbuf.tessbuf_size);
  2700. #endif
  2701. if (width + point_min.x > target->width) {
  2702. width = target->width - point_min.x;
  2703. }
  2704. #if (!gcFEATURE_VG_SPLIT_PATH || !gcFEATURE_VG_PARALLEL_PATHS || !gcFEATURE_VG_512_PARALLEL_PATHS)
  2705. s_context.tessbuf.tess_w_h = width | (height << 16);
  2706. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2707. #if !gcFEATURE_VG_PARALLEL_PATHS
  2708. if (height <= 128)
  2709. parallel_workpaths1 = 4;
  2710. else
  2711. parallel_workpaths1 = height * 128 / 4096 - 1;
  2712. if (parallel_workpaths1 > parallel_workpaths2)
  2713. parallel_workpaths1 = parallel_workpaths2;
  2714. #endif
  2715. for (y = point_min.y; y < point_max.y; y += par_height) {
  2716. #if !gcFEATURE_VG_512_PARALLEL_PATHS
  2717. next_boundary = (y + 512) & 0xfffffe00;
  2718. #elif (!gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_SPLIT_PATH)
  2719. next_boundary = (y + 32) & 0xffffffe0;
  2720. #else
  2721. next_boundary = (y + 16) & 0xfffffff0;
  2722. #endif
  2723. par_height = ((next_boundary < point_max.y) ? next_boundary - y : (point_max.y - y));
  2724. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | s_context.capabilities.cap.tiled | blend_mode | tile_setting | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  2725. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  2726. /* Program tessellation control: for TS module. */
  2727. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000000 | format | quality | tiling | fill));
  2728. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2729. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2730. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  2731. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (par_height << 16)));
  2732. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2733. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  2734. }
  2735. else {
  2736. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  2737. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00000101));
  2738. #if !gcFEATURE_VG_PARALLEL_PATHS
  2739. s_context.path_counter++;
  2740. if (parallel_workpaths1 == s_context.path_counter) {
  2741. VG_LITE_RETURN_ERROR(push_stall(&s_context, 7));
  2742. s_context.path_counter = 0;
  2743. }
  2744. #endif
  2745. }
  2746. }
  2747. }
  2748. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2749. #if !gcFEATURE_VG_PARALLEL_PATHS
  2750. if (height <= 128)
  2751. parallel_workpaths1 = 4;
  2752. else
  2753. parallel_workpaths1 = height * 128 / 4096 - 1;
  2754. if (parallel_workpaths1 > parallel_workpaths2)
  2755. parallel_workpaths1 = parallel_workpaths2;
  2756. #endif
  2757. for (y = point_min.y; y < point_max.y; y += par_height) {
  2758. #if !gcFEATURE_VG_512_PARALLEL_PATHS
  2759. next_boundary = (y + 512) & 0xfffffe00;
  2760. #elif (!gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_SPLIT_PATH)
  2761. next_boundary = (y + 32) & 0xffffffe0;
  2762. #else
  2763. next_boundary = (y + 16) & 0xfffffff0;
  2764. #endif
  2765. par_height = ((next_boundary < point_max.y) ? next_boundary - y : (point_max.y - y));
  2766. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | s_context.capabilities.cap.tiled | blend_mode | tile_setting | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable));
  2767. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2768. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2769. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  2770. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (par_height << 16)));
  2771. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2772. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  2773. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  2774. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  2775. }
  2776. else {
  2777. format = convert_path_format(VG_LITE_FP32);
  2778. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  2779. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  2780. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  2781. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00000101));
  2782. #if !gcFEATURE_VG_PARALLEL_PATHS
  2783. s_context.path_counter++;
  2784. if (parallel_workpaths1 == s_context.path_counter) {
  2785. VG_LITE_RETURN_ERROR(push_stall(&s_context, 7));
  2786. s_context.path_counter = 0;
  2787. }
  2788. #endif
  2789. }
  2790. }
  2791. }
  2792. #else
  2793. {
  2794. s_context.tessbuf.tess_w_h = width | (height << 16);
  2795. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2796. /* Tessellate path. */
  2797. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2798. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2799. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (point_min.y << 16)));
  2800. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, s_context.tessbuf.tess_w_h));
  2801. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2802. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  2803. }
  2804. else {
  2805. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  2806. }
  2807. }
  2808. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  2809. /* Tessellate path. */
  2810. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  2811. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  2812. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (point_min.y << 16)));
  2813. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, s_context.tessbuf.tess_w_h));
  2814. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  2815. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  2816. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  2817. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  2818. }
  2819. else {
  2820. format = convert_path_format(VG_LITE_FP32);
  2821. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  2822. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  2823. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  2824. }
  2825. }
  2826. }
  2827. #endif
  2828. #if gcFEATURE_VG_GLOBAL_ALPHA
  2829. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  2830. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  2831. }
  2832. #endif
  2833. return error;
  2834. }
  2835. /* GC555 vg_lite_draw_pattern API implementation. */
  2837. vg_lite_error_t vg_lite_draw_pattern(vg_lite_buffer_t *target,
  2838. vg_lite_path_t *path,
  2839. vg_lite_fill_t fill_rule,
  2840. vg_lite_matrix_t *path_matrix,
  2841. vg_lite_buffer_t *source,
  2842. vg_lite_matrix_t *pattern_matrix,
  2843. vg_lite_blend_t blend,
  2844. vg_lite_pattern_mode_t pattern_mode,
  2845. vg_lite_color_t pattern_color,
  2846. vg_lite_color_t color,
  2847. vg_lite_filter_t filter)
  2848. {
  2849. #if DUMP_API
  2850. FUNC_DUMP(vg_lite_draw_pattern)(target, path, fill_rule, path_matrix, source, pattern_matrix, blend, pattern_mode, pattern_color, color, filter);
  2851. #endif
  2852. #if gcFEATURE_VG_IM_INPUT
  2853. vg_lite_error_t error = VG_LITE_SUCCESS;
  2854. vg_lite_matrix_t inverse_matrix;
  2855. vg_lite_float_t x_step[3];
  2856. vg_lite_float_t y_step[3];
  2857. vg_lite_float_t c_step[3];
  2858. uint32_t imageMode = 0;
  2859. uint32_t blend_mode;
  2860. uint32_t filter_mode = 0;
  2861. int32_t stride;
  2862. uint32_t conversion = 0;
  2863. uint32_t tiled_source;
  2864. vg_lite_matrix_t matrix;
  2865. uint32_t pattern_tile = 0;
  2866. uint32_t transparency_mode = 0;
  2867. uint32_t tile_setting = 0;
  2868. uint32_t yuv2rgb = 0;
  2869. uint32_t uv_swiz = 0;
  2870. /* The following code is from "draw path" */
  2871. uint32_t format, quality, tiling, fill;
  2872. uint32_t tessellation_size;
  2873. vg_lite_kernel_allocate_t memory;
  2874. vg_lite_kernel_free_t free_memory;
  2875. uint32_t return_offset = 0;
  2876. vg_lite_point_t point_min = { 0 }, point_max = { 0 }, temp = { 0 };
  2877. int width, height;
  2878. uint8_t ts_is_fullscreen = 0;
  2879. float new_matrix[6];
  2880. float Scale, Bias;
  2881. uint32_t compress_mode;
  2882. uint32_t src_premultiply_enable = 0;
  2883. uint32_t index_endian = 0;
  2884. uint32_t in_premult = 0;
  2885. uint32_t paintType = 0;
  2886. uint32_t premul_flag = 0;
  2887. uint32_t prediv_flag = 0;
  2888. uint8_t lvgl_sw_blend = 0;
  2889. #if (!gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_512_PARALLEL_PATHS)
  2890. uint32_t parallel_workpaths1 = 2;
  2891. uint32_t parallel_workpaths2 = 2;
  2892. #endif
  2893. #if (!gcFEATURE_VG_SPLIT_PATH || !gcFEATURE_VG_PARALLEL_PATHS || !gcFEATURE_VG_512_PARALLEL_PATHS)
  2894. int32_t y = 0;
  2895. uint32_t par_height = 0;
  2896. int32_t next_boundary = 0;
  2897. #endif
  2898. #if gcFEATURE_VG_TRACE_API
  2899. VGLITE_LOG("vg_lite_draw_pattern %p %p %d %p %p %p %d %d 0x%08X %d\n",
  2900. target, path, fill_rule, path_matrix, source, pattern_matrix, blend, pattern_mode, pattern_color, filter);
  2901. #endif
  2902. #if gcFEATURE_VG_ERROR_CHECK
  2903. #if !gcFEATURE_VG_QUALITY_8X
  2904. if (path->quality == VG_LITE_UPPER) {
  2905. return VG_LITE_NOT_SUPPORT;
  2906. }
  2907. #endif
  2908. #if !gcFEATURE_VG_INDEX_ENDIAN
  2909. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  2910. return VG_LITE_NOT_SUPPORT;
  2911. }
  2912. #endif
  2913. #if !gcFEATURE_VG_RGBA8_ETC2_EAC
  2914. if (source->format == VG_LITE_RGBA8888_ETC2_EAC) {
  2915. return VG_LITE_NOT_SUPPORT;
  2916. }
  2917. #else
  2918. if ((source->format == VG_LITE_RGBA8888_ETC2_EAC) && (source->width % 16 || source->height % 4)) {
  2919. return VG_LITE_INVALID_ARGUMENT;
  2920. }
  2921. #endif
  2922. #if !gcFEATURE_VG_YUY2_INPUT
  2923. if (source->format == VG_LITE_YUYV || source->format == VG_LITE_YUY2) {
  2924. return VG_LITE_NOT_SUPPORT;
  2925. }
  2926. #endif
  2927. #if !gcFEATURE_VG_YUV_INPUT
  2928. if ((source->format >= VG_LITE_NV12 && source->format <= VG_LITE_NV16) || source->format == VG_LITE_NV24) {
  2929. return VG_LITE_NOT_SUPPORT;
  2930. }
  2931. #elif !gcFEATURE_VG_NV24_INPUT
  2932. if (source->format == VG_LITE_NV24) {
  2933. return VG_LITE_NOT_SUPPORT;
  2934. }
  2935. #endif
  2936. #if !gcFEATURE_VG_AYUV_INPUT
  2937. if (source->format == VG_LITE_ANV12 || source->format == VG_LITE_AYUY2) {
  2938. return VG_LITE_NOT_SUPPORT;
  2939. }
  2940. #endif
  2941. #if !gcFEATURE_VG_YUV_TILED_INPUT
  2942. if ((source->format >= VG_LITE_YUY2_TILED && source->format <= VG_LITE_AYUY2_TILED) || (source->format == VG_LITE_NV24_TILED)) {
  2943. return VG_LITE_NOT_SUPPORT;
  2944. }
  2945. #endif
  2946. #if !gcFEATURE_VG_24BIT
  2947. if ((target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) ||
  2948. (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658)) {
  2949. return VG_LITE_NOT_SUPPORT;
  2950. }
  2951. #endif
  2952. #if !gcFEATURE_VG_24BIT_PLANAR
  2953. if (source->format >= VG_LITE_ABGR8565_PLANAR && source->format <= VG_LITE_RGBA5658_PLANAR) {
  2954. return VG_LITE_NOT_SUPPORT;
  2955. }
  2956. #endif
  2957. #if !gcFEATURE_VG_STENCIL
  2958. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  2959. return VG_LITE_NOT_SUPPORT;
  2960. }
  2961. #endif
  2962. #if !gcFEATURE_VG_NEW_BLEND_MODE
  2963. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  2964. return VG_LITE_NOT_SUPPORT;
  2965. }
  2966. #endif
  2967. if (!path || !path->path) {
  2968. return VG_LITE_INVALID_ARGUMENT;
  2969. }
  2970. VG_LITE_RETURN_ERROR(srcbuf_align_check(source));
  2971. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  2972. #endif /* gcFEATURE_VG_ERROR_CHECK */
  2973. #if !gcFEATURE_VG_LVGL_SUPPORT
  2974. if ((blend >= VG_LITE_BLEND_ADDITIVE_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) || (blend == VG_LITE_BLEND_NORMAL_LVGL && gcFEATURE_VG_SRC_PREMULTIPLIED)) {
  2975. if (!source->lvgl_buffer) {
  2976. source->lvgl_buffer = (vg_lite_buffer_t *)vg_lite_os_malloc(sizeof(vg_lite_buffer_t));
  2977. *source->lvgl_buffer = *source;
  2978. source->lvgl_buffer->lvgl_buffer = NULL;
  2979. vg_lite_allocate(source->lvgl_buffer);
  2980. }
  2981. /* Make sure render target is up to date before reading RT. */
  2982. vg_lite_finish();
  2983. setup_lvgl_image(target, source, source->lvgl_buffer, blend);
  2984. blend = VG_LITE_BLEND_SRC_OVER;
  2985. lvgl_sw_blend = 1;
  2986. }
  2987. #endif
  2988. if (!path->path_length) {
  2989. return VG_LITE_SUCCESS;
  2990. }
  2991. if (!path_matrix) {
  2992. path_matrix = &identity_mtx;
  2993. }
  2994. if (!pattern_matrix) {
  2995. pattern_matrix = &identity_mtx;
  2996. }
  2997. /* Work on pattern states. */
  2998. matrix = *pattern_matrix;
  2999. if (source->paintType == VG_LITE_PAINT_PATTERN)
  3000. {
  3001. matrix.m[2][0] = 0;
  3002. matrix.m[2][1] = 0;
  3003. matrix.m[2][2] = 1;
  3004. source->image_mode = VG_LITE_NONE_IMAGE_MODE;
  3005. }
  3006. #if gcFEATURE_VG_INDEX_ENDIAN
  3007. if ((source->format >= VG_LITE_INDEX_1) && (source->format <= VG_LITE_INDEX_4) && source->index_endian) {
  3008. index_endian = 1 << 14;
  3009. }
  3010. #endif
  3011. #if gcFEATURE_VG_GAMMA
  3012. save_st_gamma_src_dest(source, target);
  3013. #endif
  3014. #if gcFEATURE_VG_GLOBAL_ALPHA
  3015. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3016. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  3017. }
  3018. #endif
  3019. /*blend input into context*/
  3020. s_context.blend_mode = blend;
  3021. in_premult = 0x00000000;
  3022. /* Adjust premultiply setting according to openvg condition */
  3023. src_premultiply_enable = 0x01000100;
  3024. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  3025. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  3026. prediv_flag = 0;
  3027. }
  3028. else {
  3029. prediv_flag = 1;
  3030. }
  3031. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE
  3032. || (s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  3033. premul_flag = 1;
  3034. }
  3035. else {
  3036. premul_flag = 0;
  3037. }
  3038. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  3039. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  3040. src_premultiply_enable = 0x01000100;
  3041. in_premult = 0x10000000;
  3042. }
  3043. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  3044. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  3045. src_premultiply_enable = 0x00000100;
  3046. in_premult = 0x00000000;
  3047. }
  3048. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  3049. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  3050. src_premultiply_enable = 0x01000100;
  3051. in_premult = 0x00000000;
  3052. }
  3053. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  3054. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  3055. src_premultiply_enable = 0x00000100;
  3056. in_premult = 0x00000000;
  3057. }
  3058. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  3059. in_premult = 0x00000000;
  3060. }
  3061. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  3062. target->apply_premult = 1;
  3063. }
  3064. else {
  3065. target->apply_premult = 0;
  3066. }
  3067. #if (gcFEATURE_VG_SRC_PREMULTIPLIED == 0)
  3068. if (blend == VG_LITE_BLEND_NORMAL_LVGL)
  3069. in_premult = 0x00000000;
  3070. #endif
  3071. error = set_render_target(target);
  3072. if (error != VG_LITE_SUCCESS) {
  3073. return error;
  3074. }
  3075. if ((target->format == VG_LITE_YUYV || target->format == VG_LITE_YUY2 || target->format == VG_LITE_YUY2_TILED
  3076. || target->format == VG_LITE_AYUY2 || target->format == VG_LITE_AYUY2_TILED)
  3077. && path->quality != VG_LITE_LOW)
  3078. {
  3079. path->quality = VG_LITE_LOW;
  3080. printf("If target is YUV group , the path qulity should use VG_LITE_LOW.\n");
  3081. }
  3082. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  3083. width = target->width;
  3084. height = target->height;
  3085. if (s_context.scissor_set) {
  3086. width = s_context.scissor[2] - s_context.scissor[0];
  3087. height = s_context.scissor[3] - s_context.scissor[1];
  3088. }
  3089. if (width == 0 || height == 0)
  3090. return VG_LITE_NO_CONTEXT;
  3091. if ((target->width <= width) && (target->height <= height) && (!s_context.scissor_set))
  3092. {
  3093. ts_is_fullscreen = 1;
  3094. point_min.x = 0;
  3095. point_min.y = 0;
  3096. point_max.x = target->width;
  3097. point_max.y = target->height;
  3098. }
  3099. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  3100. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  3101. conversion = 0x80000000;
  3102. }
  3103. /* Compute inverse matrix. */
  3104. if (!inverse(&inverse_matrix, &matrix))
  3105. return VG_LITE_INVALID_ARGUMENT;
  3106. #if gcFEATURE_VG_MATH_PRECISION_FIX
  3107. /* Compute interpolation steps. */
  3108. x_step[0] = inverse_matrix.m[0][0];
  3109. x_step[1] = inverse_matrix.m[1][0];
  3110. x_step[2] = inverse_matrix.m[2][0];
  3111. y_step[0] = inverse_matrix.m[0][1];
  3112. y_step[1] = inverse_matrix.m[1][1];
  3113. y_step[2] = inverse_matrix.m[2][1];
  3114. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  3115. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3116. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3117. #else
  3118. /* Compute interpolation steps. */
  3119. x_step[0] = inverse_matrix.m[0][0] / source->width;
  3120. x_step[1] = inverse_matrix.m[1][0] / source->height;
  3121. x_step[2] = inverse_matrix.m[2][0];
  3122. y_step[0] = inverse_matrix.m[0][1] / source->width;
  3123. y_step[1] = inverse_matrix.m[1][1] / source->height;
  3124. y_step[2] = inverse_matrix.m[2][1];
  3125. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source->width;
  3126. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source->height;
  3127. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3128. #endif
  3129. /* Determine image mode (NORMAL, NONE , MULTIPLY or STENCIL) depending on the color. */
  3130. switch (source->image_mode) {
  3131. case VG_LITE_NONE_IMAGE_MODE:
  3132. imageMode = 0x0;
  3133. break;
  3134. case VG_LITE_MULTIPLY_IMAGE_MODE:
  3135. imageMode = 0x00002000;
  3136. break;
  3137. case VG_LITE_NORMAL_IMAGE_MODE:
  3138. case VG_LITE_ZERO:
  3139. imageMode = 0x00001000;
  3140. break;
  3141. case VG_LITE_STENCIL_MODE:
  3142. imageMode = 0x00003000;
  3143. break;
  3144. case VG_LITE_RECOLOR_MODE:
  3145. imageMode = 0x00006000;
  3146. break;
  3147. }
  3148. switch (filter) {
  3149. case VG_LITE_FILTER_POINT:
  3150. filter_mode = 0;
  3151. break;
  3152. case VG_LITE_FILTER_LINEAR:
  3153. filter_mode = 0x10000;
  3154. break;
  3155. case VG_LITE_FILTER_BI_LINEAR:
  3156. filter_mode = 0x20000;
  3157. break;
  3158. case VG_LITE_FILTER_GAUSSIAN:
  3159. filter_mode = 0x30000;
  3160. break;
  3161. }
  3162. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  3163. compress_mode = (uint32_t)source->compress_mode << 25;
  3164. if (pattern_mode == VG_LITE_PATTERN_COLOR)
  3165. {
  3166. uint8_t a,r,g,b;
  3167. pattern_tile = 0;
  3168. a = pattern_color >> 24;
  3169. r = pattern_color >> 16;
  3170. g = pattern_color >> 8;
  3171. b = pattern_color;
  3172. pattern_color = (a << 24) | (b << 16) | (g << 8) | r;
  3173. }
  3174. else if (pattern_mode == VG_LITE_PATTERN_PAD)
  3175. {
  3176. pattern_tile = 0x1000;
  3177. }
  3178. #if gcFEATURE_VG_IM_REPEAT_REFLECT
  3179. else if (pattern_mode == VG_LITE_PATTERN_REPEAT)
  3180. {
  3181. pattern_tile = 0x2000;
  3182. }
  3183. else if (pattern_mode == VG_LITE_PATTERN_REFLECT)
  3184. {
  3185. pattern_tile = 0x3000;
  3186. }
  3187. #endif
  3188. else
  3189. {
  3190. return VG_LITE_INVALID_ARGUMENT;
  3191. }
  3192. if (source->paintType == VG_LITE_PAINT_PATTERN)
  3193. {
  3194. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A04, (void *) &c_step[0]));
  3195. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A05, (void *) &c_step[1]));
  3196. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A06, (void *) &x_step[0]));
  3197. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A07, (void *) &x_step[1]));
  3198. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A08, (void *) &y_step[0]));
  3199. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A09, (void *) &y_step[1]));
  3200. }
  3201. /* Setup the command buffer. */
  3202. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[0]));
  3203. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1]));
  3204. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[2]));
  3205. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[0]));
  3206. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1]));
  3207. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[2]));
  3208. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  3209. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[0]));
  3210. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1]));
  3211. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[2]));
  3212. if (((source->format >= VG_LITE_YUY2) &&
  3213. (source->format <= VG_LITE_AYUY2)) ||
  3214. ((source->format >= VG_LITE_YUY2_TILED) &&
  3215. (source->format <= VG_LITE_AYUY2_TILED))) {
  3216. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  3217. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  3218. }
  3219. blend_mode = convert_blend(blend);
  3220. if (source->paintType == VG_LITE_PAINT_PATTERN)
  3221. {
  3222. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) | filter_mode | pattern_tile | uv_swiz | yuv2rgb | conversion | compress_mode | src_premultiply_enable | index_endian));
  3223. if (source->yuv.uv_planar) {
  3224. /* Program u plane address if necessary. */
  3225. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A50, source->yuv.uv_planar));
  3226. }
  3227. if (source->yuv.v_planar) {
  3228. /* Program v plane address if necessary. */
  3229. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A52, source->yuv.v_planar));
  3230. }
  3231. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, pattern_color));
  3232. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  3233. /* 24bit format stride configured to 4bpp. */
  3234. if (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658) {
  3235. stride = source->stride / 3 * 4;
  3236. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, stride | tiled_source));
  3237. }
  3238. else {
  3239. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, source->stride | tiled_source));
  3240. }
  3241. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  3242. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width | (source->height << 16)));
  3243. }
  3244. else
  3245. {
  3246. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A25, convert_source_format(source->format) | filter_mode | pattern_tile | uv_swiz | yuv2rgb | conversion | compress_mode | src_premultiply_enable | index_endian));
  3247. if (source->yuv.uv_planar) {
  3248. /* Program u plane address if necessary. */
  3249. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  3250. }
  3251. if (source->yuv.v_planar) {
  3252. /* Program v plane address if necessary. */
  3253. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  3254. }
  3255. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A27, pattern_color));
  3256. #if !gcFEATURE_VG_LVGL_SUPPORT
  3257. if (lvgl_sw_blend) {
  3258. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->lvgl_buffer->address));
  3259. }
  3260. else
  3261. #endif
  3262. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A29, source->address));
  3263. /* 24bit format stride configured to 4bpp. */
  3264. if (source->format >= VG_LITE_RGB888 && source->format <= VG_LITE_RGBA5658) {
  3265. stride = source->stride / 3 * 4;
  3266. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, stride | tiled_source));
  3267. }
  3268. else {
  3269. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2B, source->stride | tiled_source));
  3270. }
  3271. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2D, 0));
  3272. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2F, source->width | (source->height << 16)));
  3273. }
  3274. /* Work on path states. */
  3275. matrix = *path_matrix;
  3276. if (ts_is_fullscreen == 0) {
  3277. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], &matrix);
  3278. point_min = point_max = temp;
  3279. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], &matrix);
  3280. if (temp.x < point_min.x) point_min.x = temp.x;
  3281. if (temp.y < point_min.y) point_min.y = temp.y;
  3282. if (temp.x > point_max.x) point_max.x = temp.x;
  3283. if (temp.y > point_max.y) point_max.y = temp.y;
  3284. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], &matrix);
  3285. if (temp.x < point_min.x) point_min.x = temp.x;
  3286. if (temp.y < point_min.y) point_min.y = temp.y;
  3287. if (temp.x > point_max.x) point_max.x = temp.x;
  3288. if (temp.y > point_max.y) point_max.y = temp.y;
  3289. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], &matrix);
  3290. if (temp.x < point_min.x) point_min.x = temp.x;
  3291. if (temp.y < point_min.y) point_min.y = temp.y;
  3292. if (temp.x > point_max.x) point_max.x = temp.x;
  3293. if (temp.y > point_max.y) point_max.y = temp.y;
  3294. point_min.x = MAX(point_min.x, 0);
  3295. point_min.y = MAX(point_min.y, 0);
  3296. point_max.x = MIN(point_max.x, target->width);
  3297. point_max.y = MIN(point_max.y, target->height);
  3298. if (s_context.scissor_set) {
  3299. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  3300. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  3301. point_max.x = MIN(point_max.x, s_context.scissor[2]);
  3302. point_max.y = MIN(point_max.y, s_context.scissor[3]);
  3303. }
  3304. }
  3305. width = point_max.x - point_min.x;
  3306. height = point_max.y - point_min.y;
  3307. Scale = 1.0f;
  3308. Bias = 0.0f;
  3309. new_matrix[0] = matrix.m[0][0] * Scale;
  3310. new_matrix[1] = matrix.m[0][1] * Scale;
  3311. new_matrix[2] = (matrix.m[0][0] + matrix.m[0][1]) * Bias + matrix.m[0][2];
  3312. new_matrix[3] = matrix.m[1][0] * Scale;
  3313. new_matrix[4] = matrix.m[1][1] * Scale;
  3314. new_matrix[5] = (matrix.m[1][0] + matrix.m[1][1]) * Bias + matrix.m[1][2];
  3315. /* Convert states into hardware values. */
  3316. format = convert_path_format(path->format);
  3317. quality = convert_path_quality(path->quality);
  3318. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  3319. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  3320. tessellation_size = s_context.tessbuf.tessbuf_size;
  3321. #if gcFEATURE_VG_TESSELLATION_TILED_OUT
  3322. tile_setting = (target->tiled != VG_LITE_LINEAR) ? 0x40 : 0;
  3323. #endif
  3324. if (source->paintType == VG_LITE_PAINT_PATTERN) {
  3325. paintType = 1 << 24 | 1 << 25;
  3326. }
  3327. /* Setup the command buffer. */
  3328. #if gcFEATURE_VG_GLOBAL_ALPHA
  3329. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0AD1, s_context.dst_alpha_mode | s_context.dst_alpha_value | s_context.src_alpha_mode | s_context.src_alpha_value));
  3330. #endif
  3331. /* Program color register. */
  3332. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | paintType |s_context.capabilities.cap.tiled | imageMode | blend_mode | transparency_mode | tile_setting | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable | 0x2));
  3333. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000000 | format | quality | tiling | fill));
  3334. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  3335. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  3336. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));
  3337. /* Program matrix. */
  3338. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &new_matrix[0]));
  3339. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &new_matrix[1]));
  3340. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &new_matrix[2]));
  3341. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &new_matrix[3]));
  3342. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &new_matrix[4]));
  3343. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &new_matrix[5]));
  3344. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACD, (void *) &matrix.m[0][2]));
  3345. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACE, (void *) &matrix.m[1][2]));
  3346. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  3347. {
  3348. if (path->path_changed != 0) {
  3349. if (path->uploaded.handle != NULL) {
  3350. free_memory.memory_handle = path->uploaded.handle;
  3351. vg_lite_kernel(VG_LITE_FREE, &free_memory);
  3352. path->uploaded.address = 0;
  3353. path->uploaded.memory = NULL;
  3354. path->uploaded.handle = NULL;
  3355. }
  3356. /* Allocate memory for the path data. */
  3357. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  3358. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  3359. memory.contiguous = 1;
  3360. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  3361. ((uint64_t *) memory.memory)[(path->path_length + 7) / 8] = 0;
  3362. ((uint32_t *) memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  3363. ((uint32_t *) memory.memory)[1] = 0;
  3364. memcpy((uint8_t *) memory.memory + 8, path->path, path->path_length);
  3365. ((uint32_t *) memory.memory)[return_offset] = VG_LITE_RETURN();
  3366. ((uint32_t *) memory.memory)[return_offset + 1] = 0;
  3367. path->uploaded.handle = memory.memory_handle;
  3368. path->uploaded.memory = memory.memory;
  3369. path->uploaded.address = memory.memory_gpu;
  3370. path->uploaded.bytes = memory.bytes;
  3371. path->path_changed = 0;
  3372. }
  3373. }
  3374. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3375. vglitemDUMP_BUFFER("path", (size_t)path->uploaded.address, (uint8_t *)(path->uploaded.memory), 0, path->uploaded.bytes);
  3376. }
  3377. #if !DUMP_COMMAND_CAPTURE
  3378. vglitemDUMP("@[memory 0x%08X 0x%08X]", s_context.tessbuf.physical_addr, s_context.tessbuf.tessbuf_size);
  3379. #endif
  3380. if (width + point_min.x > target->width) {
  3381. width = target->width - point_min.x;
  3382. }
  3383. #if (!gcFEATURE_VG_SPLIT_PATH || !gcFEATURE_VG_PARALLEL_PATHS || !gcFEATURE_VG_512_PARALLEL_PATHS)
  3384. s_context.tessbuf.tess_w_h = width | (height << 16);
  3385. #if !gcFEATURE_VG_PARALLEL_PATHS
  3386. if (height <= 128)
  3387. parallel_workpaths1 = 4;
  3388. else
  3389. parallel_workpaths1 = height * 128 / 4096 - 1;
  3390. if (parallel_workpaths1 > parallel_workpaths2)
  3391. parallel_workpaths1 = parallel_workpaths2;
  3392. #endif
  3393. for (y = point_min.y; y < point_max.y; y += par_height) {
  3394. #if !gcFEATURE_VG_512_PARALLEL_PATHS
  3395. next_boundary = (y + 512) & 0xfffffe00;
  3396. #elif (!gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_SPLIT_PATH)
  3397. next_boundary = (y + 32) & 0xffffffe0;
  3398. #else
  3399. next_boundary = (y + 16) & 0xfffffff0;
  3400. #endif
  3401. par_height = ((next_boundary < point_max.y) ? next_boundary - y : (point_max.y - y));
  3402. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, in_premult | paintType | s_context.capabilities.cap.tiled | imageMode | blend_mode | transparency_mode | tile_setting | s_context.enable_mask | s_context.scissor_enable | s_context.color_transform | s_context.matrix_enable | 0x2));
  3403. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000000 | format | quality | tiling | fill));
  3404. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, color));;
  3405. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  3406. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  3407. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  3408. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (par_height << 16)));
  3409. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3410. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  3411. }
  3412. else {
  3413. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO)
  3414. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  3415. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  3416. format = convert_path_format(VG_LITE_FP32);
  3417. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  3418. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  3419. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  3420. }
  3421. #if !gcFEATURE_VG_PARALLEL_PATHS
  3422. s_context.path_counter++;
  3423. if (parallel_workpaths1 == s_context.path_counter) {
  3424. VG_LITE_RETURN_ERROR(push_stall(&s_context, 7));
  3425. s_context.path_counter = 0;
  3426. }
  3427. #endif
  3428. }
  3429. }
  3430. #else
  3431. {
  3432. /* Tessellate path. */
  3433. s_context.tessbuf.tess_w_h = width | (height << 16);
  3434. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  3435. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  3436. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (point_min.y << 16)));
  3437. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, s_context.tessbuf.tess_w_h));
  3438. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3439. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  3440. }
  3441. else {
  3442. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO)
  3443. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  3444. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  3445. format = convert_path_format(VG_LITE_FP32);
  3446. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  3447. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  3448. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  3449. }
  3450. }
  3451. }
  3452. #endif
  3453. #if gcFEATURE_VG_GLOBAL_ALPHA
  3454. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3455. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  3456. }
  3457. #endif
  3458. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)*(source->height));
  3459. #if DUMP_IMAGE
  3460. dump_img(source->memory, source->width, source->height, source->format);
  3461. #endif
  3462. return error;
  3463. #else
  3464. return VG_LITE_NOT_SUPPORT;
  3465. #endif
  3466. }
  3467. /* GC555 vg_lite_draw_linear_grad API implementation. */
  3469. vg_lite_error_t vg_lite_draw_linear_grad(vg_lite_buffer_t* target,
  3470. vg_lite_path_t* path,
  3471. vg_lite_fill_t fill_rule,
  3472. vg_lite_matrix_t* path_matrix,
  3473. vg_lite_ext_linear_gradient_t* grad,
  3474. vg_lite_color_t paint_color,
  3475. vg_lite_blend_t blend,
  3476. vg_lite_filter_t filter)
  3477. {
  3478. #if DUMP_API
  3479. FUNC_DUMP(vg_lite_draw_linear_grad)(target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  3480. #endif
  3481. #if gcFEATURE_VG_LINEAR_GRADIENT_EXT && gcFEATURE_VG_IM_INPUT
  3482. vg_lite_error_t error = VG_LITE_SUCCESS;
  3483. uint32_t image_mode = 0;
  3484. uint32_t blend_mode;
  3485. uint32_t filter_mode = 0;
  3486. uint32_t conversion = 0;
  3487. uint32_t tiled_source;
  3488. vg_lite_matrix_t inverse_matrix;
  3489. vg_lite_float_t x_step[3];
  3490. vg_lite_float_t y_step[3];
  3491. vg_lite_float_t c_step[3];
  3492. vg_lite_buffer_t* source = &grad->image;
  3493. vg_lite_matrix_t* matrix = &grad->matrix;
  3494. uint32_t linear_tile = 0;
  3495. uint32_t transparency_mode = 0;
  3496. uint32_t yuv2rgb = 0;
  3497. uint32_t uv_swiz = 0;
  3498. uint32_t in_premult = 0;
  3499. uint32_t src_premultiply_enable = 0;
  3500. uint32_t premul_flag = 0;
  3501. uint32_t prediv_flag = 0;
  3502. void* data;
  3503. /* The following code is from "draw path" */
  3504. uint32_t format, quality, tiling, fill;
  3505. uint32_t tessellation_size;
  3506. vg_lite_kernel_allocate_t memory;
  3507. vg_lite_kernel_free_t free_memory;
  3508. uint32_t return_offset = 0;
  3509. vg_lite_point_t point_min = { 0 }, point_max = { 0 }, temp = { 0 };
  3510. int width, height;
  3511. uint8_t ts_is_fullscreen = 0;
  3512. float new_matrix[6];
  3513. float Scale, Bias;
  3514. vg_lite_float_t dx, dy, dxdx_dydy;
  3515. vg_lite_float_t lg_step_x_lin, lg_step_y_lin, lg_constant_lin;
  3516. #if !gcFEATURE_VG_PARALLEL_PATHS
  3517. uint32_t parallel_workpaths1 = 2;
  3518. uint32_t parallel_workpaths2 = 2;
  3519. #endif
  3520. int y;
  3521. int temp_height = 0;
  3522. #if gcFEATURE_VG_TRACE_API
  3523. VGLITE_LOG("vg_lite_draw_linear_grad %p %p %d %p %p 0x%08X %d %d\n",
  3524. target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  3525. #endif
  3526. #if gcFEATURE_VG_ERROR_CHECK
  3527. #if !gcFEATURE_VG_QUALITY_8X
  3528. if (path->quality == VG_LITE_UPPER) {
  3529. return VG_LITE_NOT_SUPPORT;
  3530. }
  3531. #endif
  3532. #if !gcFEATURE_VG_LVGL_SUPPORT
  3533. if ((blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  3534. return VG_LITE_NOT_SUPPORT;
  3535. }
  3536. #endif
  3537. #if !gcFEATURE_VG_24BIT
  3538. if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
  3539. return VG_LITE_NOT_SUPPORT;
  3540. }
  3541. #endif
  3542. #if !gcFEATURE_VG_STENCIL
  3543. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  3544. return VG_LITE_NOT_SUPPORT;
  3545. }
  3546. #endif
  3547. #if !gcFEATURE_VG_NEW_BLEND_MODE
  3548. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  3549. return VG_LITE_NOT_SUPPORT;
  3550. }
  3551. #endif
  3552. #if !gcFEATURE_VG_IM_REPEAT_REFLECT
  3553. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_REPEAT || grad->spread_mode == VG_LITE_GRADIENT_SPREAD_REFLECT) {
  3554. return VG_LITE_NOT_SUPPORT;
  3555. }
  3556. #endif
  3557. if (source->format == VG_LITE_A4 || source->format == VG_LITE_A8) {
  3558. return VG_LITE_NOT_SUPPORT;
  3559. }
  3560. if (!path || !path->path) {
  3561. return VG_LITE_INVALID_ARGUMENT;
  3562. }
  3563. #endif /* gcFEATURE_VG_ERROR_CHECK */
  3564. if (!path->path_length) {
  3565. return VG_LITE_SUCCESS;
  3566. }
  3567. if (!path_matrix) {
  3568. path_matrix = &identity_mtx;
  3569. }
  3570. #if gcFEATURE_VG_GAMMA
  3571. set_gamma_dest_only(target, VGL_TRUE);
  3572. #endif
  3573. #if gcFEATURE_VG_GLOBAL_ALPHA
  3574. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  3575. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  3576. }
  3577. #endif
  3578. /*blend input into context*/
  3579. s_context.blend_mode = blend;
  3580. src_premultiply_enable = 0x01000100;
  3581. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  3582. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  3583. prediv_flag = 0;
  3584. }
  3585. else {
  3586. prediv_flag = 1;
  3587. }
  3588. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE
  3589. || (s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  3590. premul_flag = 1;
  3591. }
  3592. else {
  3593. premul_flag = 0;
  3594. }
  3595. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  3596. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  3597. src_premultiply_enable = 0x01000100;
  3598. in_premult = 0x10000000;
  3599. }
  3600. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  3601. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  3602. src_premultiply_enable = 0x00000100;
  3603. in_premult = 0x00000000;
  3604. }
  3605. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  3606. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  3607. src_premultiply_enable = 0x01000100;
  3608. in_premult = 0x00000000;
  3609. }
  3610. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  3611. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  3612. src_premultiply_enable = 0x00000100;
  3613. in_premult = 0x00000000;
  3614. }
  3615. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  3616. #if (CHIPID==0x255)
  3617. src_premultiply_enable = 0x00000000;
  3618. #endif
  3619. in_premult = 0x00000000;
  3620. }
  3621. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  3622. target->apply_premult = 1;
  3623. }
  3624. else {
  3625. target->apply_premult = 0;
  3626. }
  3627. error = set_render_target(target);
  3628. if (error != VG_LITE_SUCCESS) {
  3629. return error;
  3630. } else if (error == VG_LITE_NO_CONTEXT) {
  3631. /* If scissoring is enabled and no valid scissoring rectangles
  3632. are present, no drawing occurs */
  3633. return VG_LITE_SUCCESS;
  3634. }
  3635. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  3636. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  3637. height = s_context.tessbuf.tess_w_h >> 16;
  3638. if (width == 0 || height == 0)
  3639. return VG_LITE_NO_CONTEXT;
  3640. if ((target->width <= width) && (target->height <= height) && (!s_context.scissor_set))
  3641. {
  3642. ts_is_fullscreen = 1;
  3643. point_min.x = 0;
  3644. point_min.y = 0;
  3645. point_max.x = target->width;
  3646. point_max.y = target->height;
  3647. }
  3648. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  3649. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  3650. conversion = 0x80000000;
  3651. }
  3652. /* Determine image mode (NORMAL, NONE , MULTIPLY or STENCIL) depending on the color. */
  3653. switch (source->image_mode) {
  3654. case VG_LITE_NONE_IMAGE_MODE:
  3655. image_mode = 0x0;
  3656. break;
  3657. case VG_LITE_MULTIPLY_IMAGE_MODE:
  3658. return VG_LITE_INVALID_ARGUMENT;
  3659. case VG_LITE_NORMAL_IMAGE_MODE:
  3660. case VG_LITE_ZERO:
  3661. image_mode = 0x00001000;
  3662. break;
  3663. case VG_LITE_STENCIL_MODE:
  3664. image_mode = 0x00003000;
  3665. break;
  3666. case VG_LITE_RECOLOR_MODE:
  3667. image_mode = 0x00006000;
  3668. break;
  3669. }
  3670. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  3671. switch (grad->spread_mode) {
  3672. case VG_LITE_GRADIENT_SPREAD_FILL:
  3673. linear_tile = 0x0;
  3674. break;
  3675. case VG_LITE_GRADIENT_SPREAD_PAD:
  3676. linear_tile = 0x1000;
  3677. break;
  3678. case VG_LITE_GRADIENT_SPREAD_REPEAT:
  3679. linear_tile = 0x2000;
  3680. break;
  3681. case VG_LITE_GRADIENT_SPREAD_REFLECT:
  3682. linear_tile = 0x3000;
  3683. break;
  3684. }
  3685. switch (filter) {
  3686. case VG_LITE_FILTER_POINT:
  3687. filter_mode = 0;
  3688. break;
  3689. case VG_LITE_FILTER_LINEAR:
  3690. filter_mode = 0x10000;
  3691. break;
  3692. case VG_LITE_FILTER_BI_LINEAR:
  3693. filter_mode = 0x20000;
  3694. break;
  3695. case VG_LITE_FILTER_GAUSSIAN:
  3696. filter_mode = 0x30000;
  3697. break;
  3698. }
  3699. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_FILL)
  3700. {
  3701. uint8_t a,r,g,b;
  3702. a = paint_color >> 24;
  3703. r = paint_color >> 16;
  3704. g = paint_color >> 8;
  3705. b = paint_color;
  3706. paint_color = (a << 24) | (b << 16) | (g << 8) | r;
  3707. }
  3708. /* compute linear gradient parameters */
  3709. /* Compute inverse matrix. */
  3710. if (!inverse(&inverse_matrix, matrix))
  3711. return VG_LITE_INVALID_ARGUMENT;
  3712. dx = grad->linear_grad.X1 - grad->linear_grad.X0;
  3713. dy = grad->linear_grad.Y1 - grad->linear_grad.Y0;
  3714. #if gcFEATURE_VG_MATH_PRECISION_FIX
  3715. dxdx_dydy = (vg_lite_float_t)((dx * dx + dy * dy) / sqrt((dx + 1) * (dx + 1) + (dy + 1) * (dy + 1)));
  3716. #else
  3717. dxdx_dydy = dx * dx + dy * dy;
  3718. #endif
  3719. /*
  3720. ** dx (T(x) - x0) + dy (T(y) - y0)
  3721. ** g = -------------------------------
  3722. ** dx^2 + dy^2
  3723. **
  3724. ** where
  3725. **
  3726. ** dx := x1 - x0
  3727. ** dy := y1 - y0
  3728. ** T(x) := (x + 0.5) m00 + (y + 0.5) m01 + m02
  3729. ** = x m00 + y m01 + 0.5 (m00 + m01) + m02
  3730. ** T(y) := (x + 0.5) m10 + (y + 0.5) m11 + m12
  3731. ** = x m10 + y m11 + 0.5 (m10 + m11) + m12.
  3732. **
  3733. ** We can factor the top line into:
  3734. **
  3735. ** = dx (x m00 + y m01 + 0.5 (m00 + m01) + m02 - x0)
  3736. ** + dy (x m10 + y m11 + 0.5 (m10 + m11) + m12 - y0)
  3737. **
  3738. ** = x (dx m00 + dy m10)
  3739. ** + y (dx m01 + dy m11)
  3740. ** + dx (0.5 (m00 + m01) + m02 - x0)
  3741. ** + dy (0.5 (m10 + m11) + m12 - y0).
  3742. */
  3743. lg_step_x_lin
  3744. = (dx * MAT(&inverse_matrix, 0, 0) + dy * MAT(&inverse_matrix, 1, 0))
  3745. / dxdx_dydy;
  3746. lg_step_y_lin
  3747. = (dx * MAT(&inverse_matrix, 0, 1) + dy * MAT(&inverse_matrix, 1, 1))
  3748. / dxdx_dydy;
  3749. lg_constant_lin =
  3750. (
  3751. (
  3752. 0.5f * ( MAT(&inverse_matrix, 0, 0) + MAT(&inverse_matrix, 0, 1) )
  3753. + MAT(&inverse_matrix, 0, 2) - grad->linear_grad.X0
  3754. ) * dx
  3755. +
  3756. (
  3757. 0.5f * ( MAT(&inverse_matrix, 1, 0) + MAT(&inverse_matrix, 1, 1) )
  3758. + MAT(&inverse_matrix, 1, 2) - grad->linear_grad.Y0
  3759. ) * dy
  3760. )
  3761. / dxdx_dydy;
  3762. /* Setup the command buffer. */
  3763. /* linear gradient parameters*/
  3764. data = &lg_constant_lin;
  3765. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A04,*(uint32_t*) data));
  3766. data = &lg_step_x_lin;
  3767. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A06,*(uint32_t*) data));
  3768. data = &lg_step_y_lin;
  3769. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A08,*(uint32_t*) data));
  3770. /* Compute inverse matrix. */
  3771. if (!inverse(&inverse_matrix, matrix))
  3772. return VG_LITE_INVALID_ARGUMENT;
  3773. #if gcFEATURE_VG_MATH_PRECISION_FIX
  3774. /* Compute interpolation steps. */
  3775. x_step[0] = inverse_matrix.m[0][0];
  3776. x_step[1] = inverse_matrix.m[1][0];
  3777. x_step[2] = inverse_matrix.m[2][0];
  3778. y_step[0] = inverse_matrix.m[0][1];
  3779. y_step[1] = inverse_matrix.m[1][1];
  3780. y_step[2] = inverse_matrix.m[2][1];
  3781. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  3782. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  3783. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3784. #else
  3785. /* Compute interpolation steps. */
  3786. x_step[0] = inverse_matrix.m[0][0] / source->width;
  3787. x_step[1] = inverse_matrix.m[1][0] / source->height;
  3788. x_step[2] = inverse_matrix.m[2][0];
  3789. y_step[0] = inverse_matrix.m[0][1] / source->width;
  3790. y_step[1] = inverse_matrix.m[1][1] / source->height;
  3791. y_step[2] = inverse_matrix.m[2][1];
  3792. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source->width;
  3793. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source->height;
  3794. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  3795. #endif
  3796. /* Setup the command buffer. */
  3797. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[0]));
  3798. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1]));
  3799. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[2]));
  3800. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[0]));
  3801. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1]));
  3802. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[2]));
  3803. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  3804. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[0]));
  3805. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1]));
  3806. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[2]));
  3807. if (((source->format >= VG_LITE_YUY2) &&
  3808. (source->format <= VG_LITE_AYUY2)) ||
  3809. ((source->format >= VG_LITE_YUY2_TILED) &&
  3810. (source->format <= VG_LITE_AYUY2_TILED))) {
  3811. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  3812. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  3813. }
  3814. if (source->yuv.uv_planar) {
  3815. /* Program u plane address if necessary. */
  3816. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  3817. }
  3818. if (source->yuv.v_planar) {
  3819. /* Program v plane address if necessary. */
  3820. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  3821. }
  3822. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) |
  3823. filter_mode | uv_swiz | yuv2rgb | linear_tile | conversion | src_premultiply_enable));
  3824. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, paint_color));
  3825. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  3826. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, tiled_source));
  3827. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  3828. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width | (source->height << 16)));
  3829. /* Work on path states. */
  3830. matrix = path_matrix;
  3831. if (ts_is_fullscreen == 0) {
  3832. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  3833. point_min = point_max = temp;
  3834. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  3835. if (temp.x < point_min.x) point_min.x = temp.x;
  3836. if (temp.y < point_min.y) point_min.y = temp.y;
  3837. if (temp.x > point_max.x) point_max.x = temp.x;
  3838. if (temp.y > point_max.y) point_max.y = temp.y;
  3839. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  3840. if (temp.x < point_min.x) point_min.x = temp.x;
  3841. if (temp.y < point_min.y) point_min.y = temp.y;
  3842. if (temp.x > point_max.x) point_max.x = temp.x;
  3843. if (temp.y > point_max.y) point_max.y = temp.y;
  3844. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  3845. if (temp.x < point_min.x) point_min.x = temp.x;
  3846. if (temp.y < point_min.y) point_min.y = temp.y;
  3847. if (temp.x > point_max.x) point_max.x = temp.x;
  3848. if (temp.y > point_max.y) point_max.y = temp.y;
  3849. point_min.x = MAX(point_min.x, 0);
  3850. point_min.y = MAX(point_min.y, 0);
  3851. point_max.x = MIN(point_max.x, target->width);
  3852. point_max.y = MIN(point_max.y, target->height);
  3853. if (s_context.scissor_set) {
  3854. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  3855. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  3856. point_max.x = MIN(point_max.x, s_context.scissor[2]);
  3857. point_max.y = MIN(point_max.y, s_context.scissor[3]);
  3858. }
  3859. }
  3860. Scale = 1.0f;
  3861. Bias = 0.0f;
  3862. new_matrix[0] = matrix->m[0][0] * Scale;
  3863. new_matrix[1] = matrix->m[0][1] * Scale;
  3864. new_matrix[2] = (matrix->m[0][0] + matrix->m[0][1]) * Bias + matrix->m[0][2];
  3865. new_matrix[3] = matrix->m[1][0] * Scale;
  3866. new_matrix[4] = matrix->m[1][1] * Scale;
  3867. new_matrix[5] = (matrix->m[1][0] + matrix->m[1][1]) * Bias + matrix->m[1][2];
  3868. /* Convert states into hardware values. */
  3869. blend_mode = convert_blend(blend);
  3870. format = convert_path_format(path->format);
  3871. quality = convert_path_quality(path->quality);
  3872. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  3873. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  3874. tessellation_size = s_context.tessbuf.tessbuf_size;
  3875. /* Setup the command buffer. */
  3876. /* Program color register. */
  3877. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x01000002 | s_context.capabilities.cap.tiled | in_premult | image_mode | blend_mode | transparency_mode | s_context.enable_mask | s_context.color_transform | s_context.matrix_enable | s_context.scissor_enable));
  3878. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000400 | format | quality | tiling | fill));
  3879. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  3880. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  3881. /* Program matrix. */
  3882. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &new_matrix[0]));
  3883. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &new_matrix[1]));
  3884. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &new_matrix[2]));
  3885. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &new_matrix[3]));
  3886. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &new_matrix[4]));
  3887. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &new_matrix[5]));
  3888. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACD, (void *) &matrix->m[0][2]));
  3889. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACE, (void *) &matrix->m[1][2]));
  3890. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  3891. {
  3892. if (path->path_changed != 0) {
  3893. if (path->uploaded.handle != NULL) {
  3894. free_memory.memory_handle = path->uploaded.handle;
  3895. vg_lite_kernel(VG_LITE_FREE, &free_memory);
  3896. path->uploaded.address = 0;
  3897. path->uploaded.memory = NULL;
  3898. path->uploaded.handle = NULL;
  3899. }
  3900. /* Allocate memory for the path data. */
  3901. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  3902. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  3903. memory.contiguous = 1;
  3904. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  3905. ((uint64_t *) memory.memory)[(path->path_length + 7) / 8] = 0;
  3906. ((uint32_t *) memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  3907. ((uint32_t *) memory.memory)[1] = 0;
  3908. memcpy((uint8_t *) memory.memory + 8, path->path, path->path_length);
  3909. ((uint32_t *) memory.memory)[return_offset] = VG_LITE_RETURN();
  3910. ((uint32_t *) memory.memory)[return_offset + 1] = 0;
  3911. path->uploaded.handle = memory.memory_handle;
  3912. path->uploaded.memory = memory.memory;
  3913. path->uploaded.address = memory.memory_gpu;
  3914. path->uploaded.bytes = memory.bytes;
  3915. path->path_changed = 0;
  3916. }
  3917. }
  3918. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3919. vglitemDUMP_BUFFER("path", (size_t)path->uploaded.address, (uint8_t *)(path->uploaded.memory), 0, path->uploaded.bytes);
  3920. }
  3921. #if !DUMP_COMMAND_CAPTURE
  3922. vglitemDUMP("@[memory 0x%08X 0x%08X]", s_context.tessbuf.physical_addr, s_context.tessbuf.tessbuf_size);
  3923. #endif
  3924. if (width + point_min.x > target->width) {
  3925. width = target->width - point_min.x;
  3926. }
  3927. #if (gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_512_PARALLEL_PATHS)
  3928. {
  3929. /* Tessellate path. */
  3930. s_context.tessbuf.tess_w_h = width | (height << 16);
  3931. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  3932. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  3933. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (point_min.y << 16)));
  3934. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, s_context.tessbuf.tess_w_h));
  3935. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3936. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  3937. } else {
  3938. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO)
  3939. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  3940. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  3941. format = convert_path_format(VG_LITE_FP32);
  3942. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  3943. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  3944. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  3945. }
  3946. }
  3947. }
  3948. #else
  3949. {
  3950. height = s_context.tessbuf.tess_w_h >> 16;
  3951. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO) {
  3952. #if gcFEATURE_VG_512_PARALLEL_PATHS
  3953. if (height <= 128)
  3954. parallel_workpaths1 = 4;
  3955. else
  3956. parallel_workpaths1 = height * 128 / 4096 - 1;
  3957. if (parallel_workpaths1 > parallel_workpaths2)
  3958. parallel_workpaths1 = parallel_workpaths2;
  3959. #endif
  3960. for (y = point_min.y; y < point_max.y; y += height) {
  3961. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  3962. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  3963. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  3964. if (y + height > target->height) {
  3965. temp_height = target->height - y;
  3966. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (temp_height << 16)));
  3967. }
  3968. else {
  3969. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (height << 16)));
  3970. }
  3971. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3972. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  3973. } else {
  3974. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  3975. #if gcFEATURE_VG_512_PARALLEL_PATHS
  3976. s_context.path_counter ++;
  3977. if (parallel_workpaths1 == s_context.path_counter) {
  3978. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  3979. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  3980. s_context.path_counter = 0;
  3981. }
  3982. #endif
  3983. }
  3984. }
  3985. }
  3986. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  3987. for (y = point_min.y; y < point_max.y; y += height) {
  3988. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  3989. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  3990. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  3991. if (y + height > target->height) {
  3992. temp_height = target->height - y;
  3993. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (temp_height << 16)));
  3994. }
  3995. else {
  3996. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (height << 16)));
  3997. }
  3998. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  3999. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  4000. } else {
  4001. format = convert_path_format(VG_LITE_FP32);
  4002. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  4003. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  4004. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  4005. #if gcFEATURE_VG_512_PARALLEL_PATHS
  4006. s_context.path_counter ++;
  4007. if (parallel_workpaths1 == s_context.path_counter) {
  4008. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  4009. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  4010. s_context.path_counter = 0;
  4011. }
  4012. #endif
  4013. }
  4014. }
  4015. }
  4016. }
  4017. #endif
  4018. /* Finalize command buffer. */
  4019. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  4020. #if gcFEATURE_VG_GLOBAL_ALPHA
  4021. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  4022. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  4023. }
  4024. #endif
  4025. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)*(source->height));
  4026. #if DUMP_IMAGE
  4027. dump_img(source->memory, source->width, source->height, source->format);
  4028. #endif
  4029. return error;
  4030. #else
  4031. return VG_LITE_NOT_SUPPORT;
  4032. #endif
  4033. }
  4034. /* GC555 vg_lite_draw_radial_grad API implementation
  4035. */
  4036. vg_lite_error_t vg_lite_draw_radial_grad(vg_lite_buffer_t* target,
  4037. vg_lite_path_t* path,
  4038. vg_lite_fill_t fill_rule,
  4039. vg_lite_matrix_t* path_matrix,
  4040. vg_lite_radial_gradient_t* grad,
  4041. vg_lite_color_t paint_color,
  4042. vg_lite_blend_t blend,
  4043. vg_lite_filter_t filter)
  4044. {
  4045. #if DUMP_API
  4046. FUNC_DUMP(vg_lite_draw_radial_grad)(target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  4047. #endif
  4048. #if gcFEATURE_VG_RADIAL_GRADIENT && gcFEATURE_VG_IM_INPUT
  4049. vg_lite_error_t error = VG_LITE_SUCCESS;
  4050. uint32_t imageMode = 0;
  4051. uint32_t blend_mode;
  4052. uint32_t filter_mode = 0;
  4053. uint32_t conversion = 0;
  4054. uint32_t tiled_source;
  4055. vg_lite_matrix_t inverse_matrix;
  4056. vg_lite_float_t x_step[3];
  4057. vg_lite_float_t y_step[3];
  4058. vg_lite_float_t c_step[3];
  4059. vg_lite_buffer_t* source = &grad->image;
  4060. vg_lite_matrix_t* matrix = &grad->matrix;
  4061. uint32_t rad_tile = 0;
  4062. uint32_t transparency_mode = 0;
  4063. uint32_t yuv2rgb = 0;
  4064. uint32_t uv_swiz = 0;
  4065. void* data;
  4066. uint32_t compress_mode;
  4067. uint32_t in_premult = 0;
  4068. uint32_t src_premultiply_enable = 0;
  4069. uint32_t premul_flag = 0;
  4070. uint32_t prediv_flag = 0;
  4071. /* The following code is from "draw path" */
  4072. uint32_t format, quality, tiling, fill;
  4073. uint32_t tessellation_size;
  4074. vg_lite_kernel_allocate_t memory;
  4075. vg_lite_kernel_free_t free_memory;
  4076. uint32_t return_offset = 0;
  4077. vg_lite_point_t point_min = { 0 }, point_max = { 0 }, temp = { 0 };
  4078. int width, height;
  4079. uint8_t ts_is_fullscreen = 0;
  4080. float new_matrix[6];
  4081. float Scale, Bias;
  4082. vg_lite_float_t radius;
  4083. vg_lite_float_t centerX, centerY;
  4084. vg_lite_float_t focalX, focalY;
  4085. vg_lite_float_t fx, fy;
  4086. vg_lite_float_t fxfy_2;
  4087. vg_lite_float_t radius2;
  4088. vg_lite_float_t r2_fx2, r2_fy2;
  4089. vg_lite_float_t r2_fx2_2, r2_fy2_2;
  4090. vg_lite_float_t r2_fx2_fy2;
  4091. vg_lite_float_t r2_fx2_fy2sq;
  4092. vg_lite_float_t cx, cy;
  4093. vg_lite_float_t rgConstantLin, rgStepXLin, rgStepYLin;
  4094. vg_lite_float_t rgConstantRad, rgStepXRad, rgStepYRad;
  4095. vg_lite_float_t rgStepXXRad, rgStepYYRad, rgStepXYRad;
  4096. int y;
  4097. int temp_height = 0;
  4098. #if !gcFEATURE_VG_PARALLEL_PATHS
  4099. uint32_t parallel_workpaths1 = 2;
  4100. uint32_t parallel_workpaths2 = 2;
  4101. #endif
  4102. #if gcFEATURE_VG_TRACE_API
  4103. VGLITE_LOG("vg_lite_draw_radial_grad %p %p %d %p %p 0x%08X %d %d\n",
  4104. target, path, fill_rule, path_matrix, grad, paint_color, blend, filter);
  4105. #endif
  4106. #if gcFEATURE_VG_ERROR_CHECK
  4107. #if !gcFEATURE_VG_QUALITY_8X
  4108. if (path->quality == VG_LITE_UPPER) {
  4109. return VG_LITE_NOT_SUPPORT;
  4110. }
  4111. #endif
  4112. #if !gcFEATURE_VG_LVGL_SUPPORT
  4113. if ((blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  4114. return VG_LITE_NOT_SUPPORT;
  4115. }
  4116. #endif
  4117. #if !gcFEATURE_VG_24BIT
  4118. if (target->format >= VG_LITE_RGB888 && target->format <= VG_LITE_RGBA5658) {
  4119. return VG_LITE_NOT_SUPPORT;
  4120. }
  4121. #endif
  4122. #if !gcFEATURE_VG_STENCIL
  4123. if (source->image_mode == VG_LITE_STENCIL_MODE) {
  4124. return VG_LITE_NOT_SUPPORT;
  4125. }
  4126. #endif
  4127. #if !gcFEATURE_VG_NEW_BLEND_MODE
  4128. if (blend == VG_LITE_BLEND_DARKEN || blend == VG_LITE_BLEND_LIGHTEN) {
  4129. return VG_LITE_NOT_SUPPORT;
  4130. }
  4131. #endif
  4132. #if !gcFEATURE_VG_IM_REPEAT_REFLECT
  4133. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_REPEAT || grad->spread_mode == VG_LITE_GRADIENT_SPREAD_REFLECT) {
  4134. return VG_LITE_NOT_SUPPORT;
  4135. }
  4136. #endif
  4137. if (source->format == VG_LITE_A4 || source->format == VG_LITE_A8) {
  4138. return VG_LITE_NOT_SUPPORT;
  4139. }
  4140. if (!path || !path->path) {
  4141. return VG_LITE_INVALID_ARGUMENT;
  4142. }
  4143. radius = grad->radial_grad.r;
  4144. if (radius < 0) {
  4145. return VG_LITE_INVALID_ARGUMENT;
  4146. }
  4147. VG_LITE_RETURN_ERROR(check_compress(source->format, source->compress_mode, source->tiled, source->width, source->height));
  4148. #endif /* gcFEATURE_VG_ERROR_CHECK */
  4149. if (!path->path_length) {
  4150. return VG_LITE_SUCCESS;
  4151. }
  4152. if (!path_matrix) {
  4153. path_matrix = &identity_mtx;
  4154. }
  4155. #if gcFEATURE_VG_GAMMA
  4156. set_gamma_dest_only(target, VGL_TRUE);
  4157. #endif
  4158. #if gcFEATURE_VG_GLOBAL_ALPHA
  4159. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  4160. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_GLOBAL, 0xff));
  4161. }
  4162. #endif
  4163. /*blend input into context*/
  4164. s_context.blend_mode = blend;
  4165. src_premultiply_enable = 0x01000100;
  4166. if (s_context.color_transform == 0 && s_context.gamma_dst == s_context.gamma_src && s_context.matrix_enable == 0 && s_context.dst_alpha_mode == 0 && s_context.src_alpha_mode == 0 &&
  4167. (source->image_mode == VG_LITE_NORMAL_IMAGE_MODE || source->image_mode == 0)) {
  4168. prediv_flag = 0;
  4169. }
  4170. else {
  4171. prediv_flag = 1;
  4172. }
  4173. if ((s_context.blend_mode >= OPENVG_BLEND_SRC_OVER && s_context.blend_mode <= OPENVG_BLEND_ADDITIVE) || source->image_mode == VG_LITE_STENCIL_MODE
  4174. || (s_context.blend_mode >= VG_LITE_BLEND_NORMAL_LVGL && s_context.blend_mode <= VG_LITE_BLEND_MULTIPLY_LVGL)) {
  4175. premul_flag = 1;
  4176. }
  4177. else {
  4178. premul_flag = 0;
  4179. }
  4180. if ((source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 0) ||
  4181. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 0)) {
  4182. src_premultiply_enable = 0x01000100;
  4183. in_premult = 0x10000000;
  4184. }
  4185. /* when src and dst all pre format, im pre_out set to 0 to perform data truncation to prevent data overflow */
  4186. else if (source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 0) {
  4187. src_premultiply_enable = 0x00000100;
  4188. in_premult = 0x00000000;
  4189. }
  4190. else if ((source->premultiplied == 0 && target->premultiplied == 1) ||
  4191. (source->premultiplied == 0 && target->premultiplied == 0 && premul_flag == 1)) {
  4192. src_premultiply_enable = 0x01000100;
  4193. in_premult = 0x00000000;
  4194. }
  4195. else if ((source->premultiplied == 1 && target->premultiplied == 1 && prediv_flag == 1) ||
  4196. (source->premultiplied == 1 && target->premultiplied == 0 && prediv_flag == 1)) {
  4197. src_premultiply_enable = 0x00000100;
  4198. in_premult = 0x00000000;
  4199. }
  4200. if ((source->format == VG_LITE_A4 || source->format == VG_LITE_A8) && blend >= VG_LITE_BLEND_SRC_OVER && blend <= VG_LITE_BLEND_SUBTRACT) {
  4201. #if (CHIPID==0x255)
  4202. src_premultiply_enable = 0x00000000;
  4203. #endif
  4204. in_premult = 0x00000000;
  4205. }
  4206. if (source->premultiplied == target->premultiplied && premul_flag == 0) {
  4207. target->apply_premult = 1;
  4208. }
  4209. else {
  4210. target->apply_premult = 0;
  4211. }
  4212. error = set_render_target(target);
  4213. if (error != VG_LITE_SUCCESS) {
  4214. return error;
  4215. } else if (error == VG_LITE_NO_CONTEXT) {
  4216. /* If scissoring is enabled and no valid scissoring rectangles
  4217. are present, no drawing occurs */
  4218. return VG_LITE_SUCCESS;
  4219. }
  4220. if ((target->format == VG_LITE_YUYV || target->format == VG_LITE_YUY2 || target->format == VG_LITE_YUY2_TILED
  4221. || target->format == VG_LITE_AYUY2 || target->format == VG_LITE_AYUY2_TILED)
  4222. && path->quality != VG_LITE_LOW)
  4223. {
  4224. path->quality = VG_LITE_LOW;
  4225. printf("If target is YUV group , the path qulity should use VG_LITE_LOW.\n");
  4226. }
  4227. transparency_mode = (source->transparency_mode == VG_LITE_IMAGE_TRANSPARENT ? 0x8000:0);
  4228. width = s_context.tessbuf.tess_w_h & 0xFFFF;
  4229. height = s_context.tessbuf.tess_w_h >> 16;
  4230. if (width == 0 || height == 0)
  4231. return VG_LITE_NO_CONTEXT;
  4232. if ((target->width <= width) && (target->height <= height) && (!s_context.scissor_set))
  4233. {
  4234. ts_is_fullscreen = 1;
  4235. point_min.x = 0;
  4236. point_min.y = 0;
  4237. point_max.x = target->width;
  4238. point_max.y = target->height;
  4239. }
  4240. /* If target is L8 and source is in YUV or RGB (not L8 or A8) then we have to convert RGB into L8. */
  4241. if ((target->format == VG_LITE_L8) && ((source->format != VG_LITE_L8) && (source->format != VG_LITE_A8))) {
  4242. conversion = 0x80000000;
  4243. }
  4244. /* Determine image mode (NORMAL, NONE , MULTIPLY or STENCIL) depending on the color. */
  4245. switch (source->image_mode) {
  4246. case VG_LITE_NONE_IMAGE_MODE:
  4247. imageMode = 0x0;
  4248. break;
  4249. case VG_LITE_MULTIPLY_IMAGE_MODE:
  4250. return VG_LITE_INVALID_ARGUMENT;
  4251. case VG_LITE_NORMAL_IMAGE_MODE:
  4252. case VG_LITE_ZERO:
  4253. imageMode = 0x00001000;
  4254. break;
  4255. case VG_LITE_STENCIL_MODE:
  4256. imageMode = 0x00003000;
  4257. break;
  4258. case VG_LITE_RECOLOR_MODE:
  4259. imageMode = 0x00006000;
  4260. break;
  4261. }
  4262. switch (filter) {
  4263. case VG_LITE_FILTER_POINT:
  4264. filter_mode = 0;
  4265. break;
  4266. case VG_LITE_FILTER_LINEAR:
  4267. filter_mode = 0x10000;
  4268. break;
  4269. case VG_LITE_FILTER_BI_LINEAR:
  4270. filter_mode = 0x20000;
  4271. break;
  4272. case VG_LITE_FILTER_GAUSSIAN:
  4273. filter_mode = 0x30000;
  4274. break;
  4275. }
  4276. tiled_source = (source->tiled != VG_LITE_LINEAR) ? 0x10000000 : 0 ;
  4277. switch (grad->spread_mode) {
  4278. case VG_LITE_GRADIENT_SPREAD_FILL:
  4279. rad_tile = 0x0;
  4280. break;
  4281. case VG_LITE_GRADIENT_SPREAD_PAD:
  4282. rad_tile = 0x1000;
  4283. break;
  4284. case VG_LITE_GRADIENT_SPREAD_REPEAT:
  4285. rad_tile = 0x2000;
  4286. break;
  4287. case VG_LITE_GRADIENT_SPREAD_REFLECT:
  4288. rad_tile = 0x3000;
  4289. break;
  4290. }
  4291. compress_mode = (uint32_t)source->compress_mode << 25;
  4292. if (grad->spread_mode == VG_LITE_GRADIENT_SPREAD_FILL)
  4293. {
  4294. uint8_t a,r,g,b;
  4295. a = paint_color >> 24;
  4296. r = paint_color >> 16;
  4297. g = paint_color >> 8;
  4298. b = paint_color;
  4299. paint_color = (a << 24) | (b << 16) | (g << 8) | r;
  4300. }
  4301. /* compute radial gradient parameters */
  4302. /* Compute inverse matrix. */
  4303. if (!inverse(&inverse_matrix, matrix))
  4304. return VG_LITE_INVALID_ARGUMENT;
  4305. /* Make shortcuts to the gradient information. */
  4306. centerX = grad->radial_grad.cx;
  4307. centerY = grad->radial_grad.cy;
  4308. focalX = grad->radial_grad.fx;
  4309. focalY = grad->radial_grad.fy;
  4310. /* Compute constants of the equation. */
  4311. fx = focalX - centerX;
  4312. fy = focalY - centerY;
  4313. radius2 = radius * radius;
  4314. if (fx*fx + fy*fy > radius2)
  4315. {
  4316. /* If the focal point is outside the circle, let's move it
  4317. to inside the circle. Per vg11 spec pg125 "If (fx, fy) lies outside ...
  4318. For here, we set it at 0.9 ratio to the center.
  4319. */
  4320. vg_lite_float_t fr = (vg_lite_float_t)sqrt(fx*fx + fy*fy);
  4321. fx = radius * fx / fr * 0.9f;
  4322. fy = radius * fy / fr * 0.9f;
  4323. focalX = grad->radial_grad.fx + fx;
  4324. focalY = grad->radial_grad.fy + fy;
  4325. }
  4326. fxfy_2 = 2.0f * fx * fy;
  4327. r2_fx2 = radius2 - fx * fx;
  4328. r2_fy2 = radius2 - fy * fy;
  4329. r2_fx2_2 = 2.0f * r2_fx2;
  4330. r2_fy2_2 = 2.0f * r2_fy2;
  4331. #if gcFEATURE_VG_MATH_PRECISION_FIX
  4332. r2_fx2_fy2 = (r2_fx2 - fy * fy) / source->width;
  4333. r2_fx2_fy2sq = (r2_fx2_fy2 * r2_fx2_fy2);
  4334. #else
  4335. r2_fx2_fy2 = r2_fx2 - fy * fy;
  4336. r2_fx2_fy2sq = r2_fx2_fy2 * r2_fx2_fy2;
  4337. #endif
  4338. /* _____________________________________
  4339. ** dx fx + dy fy + \/r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  4340. ** g = -------------------------------------------------------
  4341. ** r^2 - fx^2 - fy^2
  4342. **
  4343. ** Where
  4344. **
  4345. ** dx := F(x) - focalX
  4346. ** dy := F(y) - focalY
  4347. ** fx := focalX - centerX
  4348. ** fy := focalY - centerY
  4349. **
  4350. ** and
  4351. **
  4352. ** F(x) := (x + 0.5) m00 + (y + 0.5) m01 + m02
  4353. ** F(y) := (x + 0.5) m10 + (y + 0.5) m11 + m12
  4354. **
  4355. ** So, dx can be factored into
  4356. **
  4357. ** dx = (x + 0.5) m00 + (y + 0.5) m01 + m02 - focalX
  4358. ** = x m00 + y m01 + 0.5 m00 + 0.5 m01 + m02 - focalX
  4359. **
  4360. ** = x m00 + y m01 + cx
  4361. **
  4362. ** where
  4363. **
  4364. ** cx := 0.5 m00 + 0.5 m01 + m02 - focalX
  4365. **
  4366. ** The same way we can factor dy into
  4367. **
  4368. ** dy = x m10 + y m11 + cy
  4369. **
  4370. ** where
  4371. **
  4372. ** cy := 0.5 m10 + 0.5 m11 + m12 - focalY.
  4373. **
  4374. ** Now we can rewrite g as
  4375. ** ______________________________________
  4376. ** dx fx + dy fy / r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  4377. ** g = ----------------- + \ / -------------------------------------
  4378. ** r^2 - fx^2 - fy^2 \/ (r^2 - fx^2 - fy^2)^2
  4379. ** ____
  4380. ** = gLin + \/gRad
  4381. **
  4382. ** where
  4383. **
  4384. ** dx fx + dy fy
  4385. ** gLin := -----------------
  4386. ** r^2 - fx^2 - fy^2
  4387. **
  4388. ** r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  4389. ** gRad := -------------------------------------
  4390. ** (r^2 - fx^2 - fy^2)^2
  4391. */
  4392. cx
  4393. = 0.5f * ( MAT(&inverse_matrix, 0, 0) + MAT(&inverse_matrix, 0, 1) )
  4394. + MAT(&inverse_matrix, 0, 2)
  4395. - focalX;
  4396. cy
  4397. = 0.5f * ( MAT(&inverse_matrix, 1, 0) + MAT(&inverse_matrix, 1, 1) )
  4398. + MAT(&inverse_matrix, 1, 2)
  4399. - focalY;
  4400. /*
  4401. ** dx fx + dy fy
  4402. ** gLin := -----------------
  4403. ** r^2 - fx^2 - fy^2
  4404. **
  4405. ** We can factor the top half into
  4406. **
  4407. ** = (x m00 + y m01 + cx) fx + (x m10 + y m11 + cy) fy
  4408. **
  4409. ** = x (m00 fx + m10 fy)
  4410. ** + y (m01 fx + m11 fy)
  4411. ** + cx fx + cy fy.
  4412. */
  4413. rgStepXLin
  4414. = ( MAT(&inverse_matrix, 0, 0) * fx + MAT(&inverse_matrix, 1, 0) * fy )
  4415. / r2_fx2_fy2;
  4416. rgStepYLin
  4417. = ( MAT(&inverse_matrix, 0, 1) * fx + MAT(&inverse_matrix, 1, 1) * fy )
  4418. / r2_fx2_fy2;
  4419. rgConstantLin = ( cx * fx + cy * fy ) / r2_fx2_fy2;
  4420. /*
  4421. ** r^2 (dx^2 + dy^2) - (dx fy - dy fx)^2
  4422. ** gRad := -------------------------------------
  4423. ** (r^2 - fx^2 - fy^2)^2
  4424. **
  4425. ** r^2 (dx^2 + dy^2) - dx^2 fy^2 - dy^2 fx^2 + 2 dx dy fx fy
  4426. ** := ---------------------------------------------------------
  4427. ** (r^2 - fx^2 - fy^2)^2
  4428. **
  4429. ** dx^2 (r^2 - fy^2) + dy^2 (r^2 - fx^2) + 2 dx dy fx fy
  4430. ** := -----------------------------------------------------
  4431. ** (r^2 - fx^2 - fy^2)^2
  4432. **
  4433. ** First, lets factor dx^2 into
  4434. **
  4435. ** dx^2 = (x m00 + y m01 + cx)^2
  4436. ** = x^2 m00^2 + y^2 m01^2 + 2 x y m00 m01
  4437. ** + 2 x m00 cx + 2 y m01 cx + cx^2
  4438. **
  4439. ** = x^2 (m00^2)
  4440. ** + y^2 (m01^2)
  4441. ** + x y (2 m00 m01)
  4442. ** + x (2 m00 cx)
  4443. ** + y (2 m01 cx)
  4444. ** + cx^2.
  4445. **
  4446. ** The same can be done for dy^2:
  4447. **
  4448. ** dy^2 = x^2 (m10^2)
  4449. ** + y^2 (m11^2)
  4450. ** + x y (2 m10 m11)
  4451. ** + x (2 m10 cy)
  4452. ** + y (2 m11 cy)
  4453. ** + cy^2.
  4454. **
  4455. ** Let's also factor dx dy into
  4456. **
  4457. ** dx dy = (x m00 + y m01 + cx) (x m10 + y m11 + cy)
  4458. ** = x^2 m00 m10 + y^2 m01 m11 + x y m00 m11 + x y m01 m10
  4459. ** + x m00 cy + x m10 cx + y m01 cy + y m11 cx + cx cy
  4460. **
  4461. ** = x^2 (m00 m10)
  4462. ** + y^2 (m01 m11)
  4463. ** + x y (m00 m11 + m01 m10)
  4464. ** + x (m00 cy + m10 cx)
  4465. ** + y (m01 cy + m11 cx)
  4466. ** + cx cy.
  4467. **
  4468. ** Now that we have all this, lets look at the top of gRad.
  4469. **
  4470. ** = dx^2 (r^2 - fy^2) + dy^2 (r^2 - fx^2) + 2 dx dy fx fy
  4471. ** = x^2 m00^2 (r^2 - fy^2) + y^2 m01^2 (r^2 - fy^2)
  4472. ** + x y 2 m00 m01 (r^2 - fy^2) + x 2 m00 cx (r^2 - fy^2)
  4473. ** + y 2 m01 cx (r^2 - fy^2) + cx^2 (r^2 - fy^2)
  4474. ** + x^2 m10^2 (r^2 - fx^2) + y^2 m11^2 (r^2 - fx^2)
  4475. ** + x y 2 m10 m11 (r^2 - fx^2) + x 2 m10 cy (r^2 - fx^2)
  4476. ** + y 2 m11 cy (r^2 - fx^2) + cy^2 (r^2 - fx^2)
  4477. ** + x^2 m00 m10 2 fx fy + y^2 m01 m11 2 fx fy
  4478. ** + x y (m00 m11 + m01 m10) 2 fx fy
  4479. ** + x (m00 cy + m10 cx) 2 fx fy + y (m01 cy + m11 cx) 2 fx fy
  4480. ** + cx cy 2 fx fy
  4481. **
  4482. ** = x^2 ( m00^2 (r^2 - fy^2)
  4483. ** + m10^2 (r^2 - fx^2)
  4484. ** + m00 m10 2 fx fy
  4485. ** )
  4486. ** + y^2 ( m01^2 (r^2 - fy^2)
  4487. ** + m11^2 (r^2 - fx^2)
  4488. ** + m01 m11 2 fx fy
  4489. ** )
  4490. ** + x y ( 2 m00 m01 (r^2 - fy^2)
  4491. ** + 2 m10 m11 (r^2 - fx^2)
  4492. ** + (m00 m11 + m01 m10) 2 fx fy
  4493. ** )
  4494. ** + x ( 2 m00 cx (r^2 - fy^2)
  4495. ** + 2 m10 cy (r^2 - fx^2)
  4496. ** + (m00 cy + m10 cx) 2 fx fy
  4497. ** )
  4498. ** + y ( 2 m01 cx (r^2 - fy^2)
  4499. ** + 2 m11 cy (r^2 - fx^2)
  4500. ** + (m01 cy + m11 cx) 2 fx fy
  4501. ** )
  4502. ** + cx^2 (r^2 - fy^2) + cy^2 (r^2 - fx^2) + cx cy 2 fx fy.
  4503. */
  4504. rgStepXXRad =
  4505. (
  4506. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 0, 0) * r2_fy2
  4507. + MAT(&inverse_matrix, 1, 0) * MAT(&inverse_matrix, 1, 0) * r2_fx2
  4508. + MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 1, 0) * fxfy_2
  4509. )
  4510. / r2_fx2_fy2sq;
  4511. rgStepYYRad =
  4512. (
  4513. MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 0, 1) * r2_fy2
  4514. + MAT(&inverse_matrix, 1, 1) * MAT(&inverse_matrix, 1, 1) * r2_fx2
  4515. + MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 1, 1) * fxfy_2
  4516. )
  4517. / r2_fx2_fy2sq;
  4518. rgStepXYRad =
  4519. (
  4520. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 0, 1) * r2_fy2_2
  4521. + MAT(&inverse_matrix, 1, 0) * MAT(&inverse_matrix, 1, 1) * r2_fx2_2
  4522. + (
  4523. MAT(&inverse_matrix, 0, 0) * MAT(&inverse_matrix, 1, 1)
  4524. + MAT(&inverse_matrix, 0, 1) * MAT(&inverse_matrix, 1, 0)
  4525. )
  4526. * fxfy_2
  4527. )
  4528. / r2_fx2_fy2sq;
  4529. rgStepXRad =
  4530. (
  4531. MAT(&inverse_matrix, 0, 0) * cx * r2_fy2_2
  4532. + MAT(&inverse_matrix, 1, 0) * cy * r2_fx2_2
  4533. + (
  4534. MAT(&inverse_matrix, 0, 0) * cy
  4535. + MAT(&inverse_matrix, 1, 0) * cx
  4536. )
  4537. * fxfy_2
  4538. )
  4539. / r2_fx2_fy2sq;
  4540. rgStepYRad =
  4541. (
  4542. MAT(&inverse_matrix, 0, 1) * cx * r2_fy2_2
  4543. + MAT(&inverse_matrix, 1, 1) * cy * r2_fx2_2
  4544. + (
  4545. MAT(&inverse_matrix, 0, 1) * cy
  4546. + MAT(&inverse_matrix, 1, 1) * cx
  4547. )
  4548. * fxfy_2
  4549. )
  4550. / r2_fx2_fy2sq;
  4551. rgConstantRad =
  4552. (
  4553. cx * cx * r2_fy2
  4554. + cy * cy * r2_fx2
  4555. + cx * cy * fxfy_2
  4556. )
  4557. / r2_fx2_fy2sq;
  4558. /* Setup the command buffer. */
  4559. /* rad gradient parameters*/
  4560. data = &rgConstantLin;
  4561. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A04,*(uint32_t*) data));
  4562. data = &rgStepXLin;
  4563. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A06,*(uint32_t*) data));
  4564. data = &rgStepYLin;
  4565. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A08,*(uint32_t*) data));
  4566. data = &rgConstantRad;
  4567. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A05,*(uint32_t*) data));
  4568. data = &rgStepXRad;
  4569. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A07,*(uint32_t*) data));
  4570. data = &rgStepYRad;
  4571. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A09,*(uint32_t*) data));
  4572. data = &rgStepXXRad;
  4573. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A03,*(uint32_t*) data));
  4574. data = &rgStepYYRad;
  4575. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A0A,*(uint32_t*) data));
  4576. data = &rgStepXYRad;
  4577. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A0B,*(uint32_t*) data));
  4578. /* Compute inverse matrix. */
  4579. if (!inverse(&inverse_matrix, matrix))
  4580. return VG_LITE_INVALID_ARGUMENT;
  4581. #if gcFEATURE_VG_MATH_PRECISION_FIX
  4582. /* Compute interpolation steps. */
  4583. x_step[0] = inverse_matrix.m[0][0];
  4584. x_step[1] = inverse_matrix.m[1][0];
  4585. x_step[2] = inverse_matrix.m[2][0];
  4586. y_step[0] = inverse_matrix.m[0][1];
  4587. y_step[1] = inverse_matrix.m[1][1];
  4588. y_step[2] = inverse_matrix.m[2][1];
  4589. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]);
  4590. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]);
  4591. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  4592. #else
  4593. /* Compute interpolation steps. */
  4594. x_step[0] = inverse_matrix.m[0][0] / source->width;
  4595. x_step[1] = inverse_matrix.m[1][0] / source->height;
  4596. x_step[2] = inverse_matrix.m[2][0];
  4597. y_step[0] = inverse_matrix.m[0][1] / source->width;
  4598. y_step[1] = inverse_matrix.m[1][1] / source->height;
  4599. y_step[2] = inverse_matrix.m[2][1];
  4600. c_step[0] = (0.5f * (inverse_matrix.m[0][0] + inverse_matrix.m[0][1]) + inverse_matrix.m[0][2]) / source->width;
  4601. c_step[1] = (0.5f * (inverse_matrix.m[1][0] + inverse_matrix.m[1][1]) + inverse_matrix.m[1][2]) / source->height;
  4602. c_step[2] = 0.5f * (inverse_matrix.m[2][0] + inverse_matrix.m[2][1]) + inverse_matrix.m[2][2];
  4603. #endif
  4604. /* Setup the command buffer. */
  4605. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A18, (void *) &c_step[0]));
  4606. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A19, (void *) &c_step[1]));
  4607. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1A, (void *) &c_step[2]));
  4608. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1C, (void *) &x_step[0]));
  4609. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1D, (void *) &x_step[1]));
  4610. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A1E, (void *) &x_step[2]));
  4611. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1F, 0x00000001));
  4612. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A20, (void *) &y_step[0]));
  4613. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A21, (void *) &y_step[1]));
  4614. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A22, (void *) &y_step[2]));
  4615. if (((source->format >= VG_LITE_YUY2) &&
  4616. (source->format <= VG_LITE_AYUY2)) ||
  4617. ((source->format >= VG_LITE_YUY2_TILED) &&
  4618. (source->format <= VG_LITE_AYUY2_TILED))) {
  4619. yuv2rgb = convert_yuv2rgb(source->yuv.yuv2rgb);
  4620. uv_swiz = convert_uv_swizzle(source->yuv.swizzle);
  4621. }
  4622. if (source->yuv.uv_planar) {
  4623. /* Program u plane address if necessary. */
  4624. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A51, source->yuv.uv_planar));
  4625. }
  4626. if (source->yuv.v_planar) {
  4627. /* Program v plane address if necessary. */
  4628. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A53, source->yuv.v_planar));
  4629. }
  4630. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A24, convert_source_format(source->format) |
  4631. filter_mode | uv_swiz | yuv2rgb | rad_tile | conversion | src_premultiply_enable | compress_mode));
  4632. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A26, paint_color));
  4633. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A28, source->address));
  4634. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2A, tiled_source));
  4635. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2C, 0));
  4636. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A2E, source->width | (source->height << 16)));
  4637. /* Work on path states. */
  4638. matrix = path_matrix;
  4639. if (ts_is_fullscreen == 0) {
  4640. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[1], matrix);
  4641. point_min = point_max = temp;
  4642. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[1], matrix);
  4643. if (temp.x < point_min.x) point_min.x = temp.x;
  4644. if (temp.y < point_min.y) point_min.y = temp.y;
  4645. if (temp.x > point_max.x) point_max.x = temp.x;
  4646. if (temp.y > point_max.y) point_max.y = temp.y;
  4647. transform(&temp, (vg_lite_float_t)path->bounding_box[2], (vg_lite_float_t)path->bounding_box[3], matrix);
  4648. if (temp.x < point_min.x) point_min.x = temp.x;
  4649. if (temp.y < point_min.y) point_min.y = temp.y;
  4650. if (temp.x > point_max.x) point_max.x = temp.x;
  4651. if (temp.y > point_max.y) point_max.y = temp.y;
  4652. transform(&temp, (vg_lite_float_t)path->bounding_box[0], (vg_lite_float_t)path->bounding_box[3], matrix);
  4653. if (temp.x < point_min.x) point_min.x = temp.x;
  4654. if (temp.y < point_min.y) point_min.y = temp.y;
  4655. if (temp.x > point_max.x) point_max.x = temp.x;
  4656. if (temp.y > point_max.y) point_max.y = temp.y;
  4657. point_min.x = MAX(point_min.x, 0);
  4658. point_min.y = MAX(point_min.y, 0);
  4659. point_max.x = MIN(point_max.x, target->width);
  4660. point_max.y = MIN(point_max.y, target->height);
  4661. if (s_context.scissor_set) {
  4662. point_min.x = MAX(point_min.x, s_context.scissor[0]);
  4663. point_min.y = MAX(point_min.y, s_context.scissor[1]);
  4664. point_max.x = MIN(point_max.x, s_context.scissor[2]);
  4665. point_max.y = MIN(point_max.y, s_context.scissor[3]);
  4666. }
  4667. }
  4668. Scale = 1.0f;
  4669. Bias = 0.0f;
  4670. new_matrix[0] = matrix->m[0][0] * Scale;
  4671. new_matrix[1] = matrix->m[0][1] * Scale;
  4672. new_matrix[2] = (matrix->m[0][0] + matrix->m[0][1]) * Bias + matrix->m[0][2];
  4673. new_matrix[3] = matrix->m[1][0] * Scale;
  4674. new_matrix[4] = matrix->m[1][1] * Scale;
  4675. new_matrix[5] = (matrix->m[1][0] + matrix->m[1][1]) * Bias + matrix->m[1][2];
  4676. /* Convert states into hardware values. */
  4677. blend_mode = convert_blend(blend);
  4678. format = convert_path_format(path->format);
  4679. quality = convert_path_quality(path->quality);
  4680. tiling = (s_context.capabilities.cap.tiled == 2) ? 0x2000000 : 0;
  4681. fill = (fill_rule == VG_LITE_FILL_EVEN_ODD) ? 0x10 : 0;
  4682. tessellation_size = s_context.tessbuf.tessbuf_size;
  4683. /* Setup the command buffer. */
  4684. /* Program color register. */
  4685. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A00, 0x02000002 | s_context.capabilities.cap.tiled | in_premult | imageMode | blend_mode | transparency_mode | s_context.enable_mask | s_context.color_transform | s_context.matrix_enable | s_context.scissor_enable));
  4686. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000000 | format | quality | tiling | fill));
  4687. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3B, 0x3F800000)); /* Path tessellation SCALE. */
  4688. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3C, 0x00000000)); /* Path tessellation BIAS. */
  4689. /* Program matrix. */
  4690. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A40, (void *) &new_matrix[0]));
  4691. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A41, (void *) &new_matrix[1]));
  4692. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A42, (void *) &new_matrix[2]));
  4693. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A43, (void *) &new_matrix[3]));
  4694. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A44, (void *) &new_matrix[4]));
  4695. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0A45, (void *) &new_matrix[5]));
  4696. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACD, (void *) &matrix->m[0][2]));
  4697. VG_LITE_RETURN_ERROR(push_state_ptr(&s_context, 0x0ACE, (void *) &matrix->m[1][2]));
  4698. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1)
  4699. {
  4700. if (path->path_changed != 0) {
  4701. if (path->uploaded.handle != NULL) {
  4702. free_memory.memory_handle = path->uploaded.handle;
  4703. vg_lite_kernel(VG_LITE_FREE, &free_memory);
  4704. path->uploaded.address = 0;
  4705. path->uploaded.memory = NULL;
  4706. path->uploaded.handle = NULL;
  4707. }
  4708. /* Allocate memory for the path data. */
  4709. memory.bytes = 16 + VG_LITE_ALIGN(path->path_length, 8);
  4710. return_offset = (8 + VG_LITE_ALIGN(path->path_length, 8)) / 4;
  4711. memory.contiguous = 1;
  4712. VG_LITE_RETURN_ERROR(vg_lite_kernel(VG_LITE_ALLOCATE, &memory));
  4713. ((uint64_t *) memory.memory)[(path->path_length + 7) / 8] = 0;
  4714. ((uint32_t *) memory.memory)[0] = VG_LITE_DATA((path->path_length + 7) / 8);
  4715. ((uint32_t *) memory.memory)[1] = 0;
  4716. memcpy((uint8_t *) memory.memory + 8, path->path, path->path_length);
  4717. ((uint32_t *) memory.memory)[return_offset] = VG_LITE_RETURN();
  4718. ((uint32_t *) memory.memory)[return_offset + 1] = 0;
  4719. path->uploaded.handle = memory.memory_handle;
  4720. path->uploaded.memory = memory.memory;
  4721. path->uploaded.address = memory.memory_gpu;
  4722. path->uploaded.bytes = memory.bytes;
  4723. path->path_changed = 0;
  4724. }
  4725. }
  4726. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  4727. vglitemDUMP_BUFFER("path", (size_t)path->uploaded.address, (uint8_t *)(path->uploaded.memory), 0, path->uploaded.bytes);
  4728. }
  4729. #if !DUMP_COMMAND_CAPTURE
  4730. vglitemDUMP("@[memory 0x%08X 0x%08X]", s_context.tessbuf.physical_addr, s_context.tessbuf.tessbuf_size);
  4731. #endif
  4732. if (width + point_min.x > target->width) {
  4733. width = target->width - point_min.x;
  4734. }
  4735. #if (gcFEATURE_VG_PARALLEL_PATHS && gcFEATURE_VG_512_PARALLEL_PATHS)
  4736. {
  4737. /* Tessellate path. */
  4738. s_context.tessbuf.tess_w_h = width | (height << 16);
  4739. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  4740. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  4741. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (point_min.y << 16)));
  4742. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, s_context.tessbuf.tess_w_h));
  4743. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  4744. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  4745. } else {
  4746. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO)
  4747. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  4748. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  4749. format = convert_path_format(VG_LITE_FP32);
  4750. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  4751. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  4752. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  4753. }
  4754. }
  4755. }
  4756. #else
  4757. {
  4758. height = s_context.tessbuf.tess_w_h >> 16;
  4759. if (path->path_type == VG_LITE_DRAW_FILL_PATH || path->path_type == VG_LITE_DRAW_ZERO) {
  4760. #if gcFEATURE_VG_512_PARALLEL_PATHS
  4761. if (height <= 128)
  4762. parallel_workpaths1 = 4;
  4763. else
  4764. parallel_workpaths1 = height * 128 / 4096 - 1;
  4765. if (parallel_workpaths1 > parallel_workpaths2)
  4766. parallel_workpaths1 = parallel_workpaths2;
  4767. #endif
  4768. for (y = point_min.y; y < point_max.y; y += height) {
  4769. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  4770. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  4771. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  4772. if (y + height > target->height) {
  4773. temp_height = target->height - y;
  4774. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (temp_height << 16)));
  4775. }
  4776. else {
  4777. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (height << 16)));
  4778. }
  4779. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  4780. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  4781. } else {
  4782. VG_LITE_RETURN_ERROR(push_data(&s_context, path->path_length, path->path));
  4783. #if gcFEATURE_VG_512_PARALLEL_PATHS
  4784. s_context.path_counter ++;
  4785. if (parallel_workpaths1 == s_context.path_counter) {
  4786. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  4787. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  4788. s_context.path_counter = 0;
  4789. }
  4790. #endif
  4791. }
  4792. }
  4793. }
  4794. if (path->path_type == VG_LITE_DRAW_STROKE_PATH || path->path_type == VG_LITE_DRAW_FILL_STROKE_PATH) {
  4795. for (y = point_min.y; y < point_max.y; y += height) {
  4796. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A1B, 0x00011000));
  4797. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3D, tessellation_size / 64));
  4798. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A39, point_min.x | (y << 16)));
  4799. if (y + height > target->height) {
  4800. temp_height = target->height - y;
  4801. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (temp_height << 16)));
  4802. }
  4803. else {
  4804. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A3A, width | (height << 16)));
  4805. }
  4806. if (VLM_PATH_GET_UPLOAD_BIT(*path) == 1) {
  4807. VG_LITE_RETURN_ERROR(push_call(&s_context, path->uploaded.address, path->uploaded.bytes));
  4808. } else {
  4809. format = convert_path_format(VG_LITE_FP32);
  4810. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0x01000200 | format | quality | tiling | 0x0));
  4811. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A02, path->stroke_color));
  4812. VG_LITE_RETURN_ERROR(push_data(&s_context, path->stroke_size, path->stroke_path));
  4813. #if gcFEATURE_VG_512_PARALLEL_PATHS
  4814. s_context.path_counter ++;
  4815. if (parallel_workpaths1 == s_context.path_counter) {
  4816. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0E02, 0x10 | (0x7 << 8)));
  4817. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0F00, 0x10 | (0x7 << 8)));
  4818. s_context.path_counter = 0;
  4819. }
  4820. #endif
  4821. }
  4822. }
  4823. }
  4824. }
  4825. #endif
  4826. /* Finialize command buffer. */
  4827. VG_LITE_RETURN_ERROR(push_state(&s_context, 0x0A34, 0));
  4828. #if gcFEATURE_VG_GLOBAL_ALPHA
  4829. if (blend >= VG_LITE_BLEND_NORMAL_LVGL && blend <= VG_LITE_BLEND_MULTIPLY_LVGL) {
  4830. VG_LITE_RETURN_ERROR(vg_lite_dest_global_alpha(VG_LITE_NORMAL, 0xFF));
  4831. }
  4832. #endif
  4833. vglitemDUMP_BUFFER("image", (size_t)source->address, source->memory, 0, (source->stride)*(source->height));
  4834. #if DUMP_IMAGE
  4835. dump_img(source->memory, source->width, source->height, source->format);
  4836. #endif
  4837. return error;
  4838. #else
  4839. return VG_LITE_NOT_SUPPORT;
  4840. #endif
  4841. }
  4842. #endif /* (CHIPID==0x355 || CHIPID==0x255) */
/* GC555/GC355/GC255 vg_lite_draw_grad API implementation. */
  4845. vg_lite_error_t vg_lite_draw_grad(vg_lite_buffer_t* target,
  4846. vg_lite_path_t* path,
  4847. vg_lite_fill_t fill_rule,
  4848. vg_lite_matrix_t* matrix,
  4849. vg_lite_linear_gradient_t* grad,
  4850. vg_lite_blend_t blend)
  4851. {
  4852. #if DUMP_API
  4853. FUNC_DUMP(vg_lite_draw_grad)(target, path, fill_rule, matrix, grad, blend);
  4854. #endif
  4855. return vg_lite_draw_pattern(target, path, fill_rule, matrix,
  4856. &grad->image, &grad->matrix, blend, VG_LITE_PATTERN_PAD, 0, 0, VG_LITE_FILTER_LINEAR);
  4857. }